author    Antonio Cunei <antonio.cunei@epfl.ch>  2011-04-20 12:19:47 +0000
committer Antonio Cunei <antonio.cunei@epfl.ch>  2011-04-20 12:19:47 +0000
commit    517acfdd56af4562d3c4f2963f656f2834ca23e2
tree      609c95d6aafff786460300162991ce5b31fe8c32
parent    e5ad9c55f8a7a97aa7c36a057078db2031f468e8
Fixing an incomplete svnmerge; second, merge again from trunk.
-rw-r--r--build.xml4
-rw-r--r--lib/jline.jar.desired.sha12
-rw-r--r--lib/msil.jar.desired.sha12
-rw-r--r--lib/scala-compiler.jar.desired.sha12
-rw-r--r--lib/scala-library-src.jar.desired.sha12
-rw-r--r--lib/scala-library.jar.desired.sha12
-rw-r--r--src/actors/scala/actors/Debug.scala2
-rw-r--r--src/actors/scala/actors/Future.scala6
-rw-r--r--src/actors/scala/actors/IScheduler.scala8
-rw-r--r--src/actors/scala/actors/MessageQueue.scala4
-rw-r--r--src/actors/scala/actors/Reaction.scala2
-rw-r--r--src/actors/scala/actors/Scheduler.scala2
-rw-r--r--src/actors/scala/actors/package.scala12
-rw-r--r--src/actors/scala/actors/remote/RemoteActor.scala4
-rw-r--r--src/actors/scala/actors/remote/TcpService.scala5
-rw-r--r--src/actors/scala/actors/scheduler/TerminationMonitor.scala2
-rw-r--r--src/build/genprod.scala6
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala2
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala7
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala19
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala64
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/Index.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala33
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Index.scala18
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala42
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Template.scala53
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gifbin0 -> 1206 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gifbin0 -> 1544 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gifbin0 -> 1341 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.pngbin481 -> 1692 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gifbin0 -> 1462 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.pngbin533 -> 1803 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gifbin0 -> 1324 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gifbin0 -> 1104 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.pngbin0 -> 965 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gifbin0 -> 1366 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gifbin0 -> 1115 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css153
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js79
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.pngbin0 -> 1198 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.pngbin0 -> 2441 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gifbin0 -> 1145 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gifbin0 -> 1118 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gifbin0 -> 1145 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gifbin0 -> 1201 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.pngbin0 -> 1380 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.pngbin0 -> 1864 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.pngbin0 -> 1434 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.pngbin0 -> 1965 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gifbin0 -> 1214 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gifbin0 -> 1209 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css229
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js8
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gifbin0 -> 1206 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.pngbin0 -> 1879 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gifbin0 -> 1206 bytes
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala17
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala7
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Symbols.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala23
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala50
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala5
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala144
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala34
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala16
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala74
-rw-r--r--src/compiler/scala/tools/nsc/util/Statistics.scala2
-rw-r--r--src/jline/pom.xml273
-rw-r--r--src/jline/project/build.properties4
-rw-r--r--src/jline/project/plugins/project/build.properties2
-rw-r--r--src/jline/src/main/java/scala/tools/jline/TerminalSupport.java10
-rw-r--r--src/jline/src/main/java/scala/tools/jline/UnixTerminal.java15
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java46
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/Key.java6
-rw-r--r--src/jline/src/main/resources/scala/tools/jline/keybindings.properties3
-rw-r--r--src/jline/src/main/resources/scala/tools/jline/macbindings.properties62
-rw-r--r--src/library/scala/Application.scala2
-rw-r--r--src/library/scala/Array.scala28
-rw-r--r--src/library/scala/Cell.scala2
-rw-r--r--src/library/scala/CountedIterator.scala2
-rw-r--r--src/library/scala/Double.scala2
-rw-r--r--src/library/scala/Either.scala6
-rw-r--r--src/library/scala/Float.scala2
-rw-r--r--src/library/scala/Function.scala8
-rw-r--r--src/library/scala/Function0.scala2
-rw-r--r--src/library/scala/Function10.scala2
-rw-r--r--src/library/scala/Function11.scala2
-rw-r--r--src/library/scala/Function12.scala2
-rw-r--r--src/library/scala/Function13.scala2
-rw-r--r--src/library/scala/Function14.scala2
-rw-r--r--src/library/scala/Function15.scala2
-rw-r--r--src/library/scala/Function16.scala2
-rw-r--r--src/library/scala/Function17.scala2
-rw-r--r--src/library/scala/Function18.scala2
-rw-r--r--src/library/scala/Function19.scala2
-rw-r--r--src/library/scala/Function2.scala2
-rw-r--r--src/library/scala/Function20.scala2
-rw-r--r--src/library/scala/Function21.scala2
-rw-r--r--src/library/scala/Function22.scala2
-rw-r--r--src/library/scala/Function3.scala2
-rw-r--r--src/library/scala/Function4.scala2
-rw-r--r--src/library/scala/Function5.scala2
-rw-r--r--src/library/scala/Function6.scala2
-rw-r--r--src/library/scala/Function7.scala2
-rw-r--r--src/library/scala/Function8.scala2
-rw-r--r--src/library/scala/Function9.scala2
-rw-r--r--src/library/scala/Math.scala46
-rw-r--r--src/library/scala/NotDefinedError.scala2
-rw-r--r--src/library/scala/Predef.scala10
-rw-r--r--src/library/scala/Product.scala2
-rw-r--r--src/library/scala/Tuple2.scala2
-rw-r--r--src/library/scala/Tuple3.scala2
-rw-r--r--src/library/scala/annotation/serializable.scala2
-rw-r--r--src/library/scala/collection/CustomParallelizable.scala3
-rw-r--r--src/library/scala/collection/GenIterable.scala36
-rw-r--r--src/library/scala/collection/GenIterableLike.scala143
-rw-r--r--src/library/scala/collection/GenIterableView.scala18
-rw-r--r--src/library/scala/collection/GenIterableViewLike.scala83
-rw-r--r--src/library/scala/collection/GenMap.scala36
-rw-r--r--src/library/scala/collection/GenMapLike.scala64
-rw-r--r--src/library/scala/collection/GenSeq.scala36
-rw-r--r--src/library/scala/collection/GenSeqLike.scala410
-rw-r--r--src/library/scala/collection/GenSeqView.scala18
-rw-r--r--src/library/scala/collection/GenSeqViewLike.scala164
-rw-r--r--src/library/scala/collection/GenSet.scala37
-rw-r--r--src/library/scala/collection/GenSetLike.scala131
-rw-r--r--src/library/scala/collection/GenTraversable.scala39
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala333
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala510
-rw-r--r--src/library/scala/collection/GenTraversableView.scala18
-rw-r--r--src/library/scala/collection/GenTraversableViewLike.scala141
-rwxr-xr-xsrc/library/scala/collection/IndexedSeqOptimized.scala8
-rw-r--r--src/library/scala/collection/Iterable.scala13
-rw-r--r--src/library/scala/collection/IterableLike.scala117
-rw-r--r--src/library/scala/collection/IterableProxyLike.scala6
-rw-r--r--src/library/scala/collection/IterableView.scala2
-rw-r--r--src/library/scala/collection/IterableViewLike.scala92
-rw-r--r--src/library/scala/collection/Iterator.scala32
-rw-r--r--src/library/scala/collection/JavaConversions.scala68
-rwxr-xr-xsrc/library/scala/collection/JavaConverters.scala22
-rwxr-xr-xsrc/library/scala/collection/LinearSeqOptimized.scala2
-rw-r--r--src/library/scala/collection/Map.scala4
-rw-r--r--src/library/scala/collection/MapLike.scala43
-rw-r--r--src/library/scala/collection/MapProxyLike.scala2
-rw-r--r--src/library/scala/collection/Parallel.scala4
-rw-r--r--src/library/scala/collection/Parallelizable.scala6
-rw-r--r--src/library/scala/collection/Seq.scala5
-rw-r--r--src/library/scala/collection/SeqLike.scala331
-rw-r--r--src/library/scala/collection/SeqProxyLike.scala26
-rw-r--r--src/library/scala/collection/SeqView.scala2
-rw-r--r--src/library/scala/collection/SeqViewLike.scala160
-rw-r--r--src/library/scala/collection/Set.scala8
-rw-r--r--src/library/scala/collection/SetLike.scala94
-rw-r--r--src/library/scala/collection/SetProxyLike.scala14
-rw-r--r--src/library/scala/collection/SortedSetLike.scala2
-rw-r--r--src/library/scala/collection/Traversable.scala12
-rw-r--r--src/library/scala/collection/TraversableLike.scala217
-rw-r--r--src/library/scala/collection/TraversableOnce.scala357
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala4
-rw-r--r--src/library/scala/collection/TraversableView.scala5
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala139
-rw-r--r--src/library/scala/collection/generic/Addable.scala4
-rw-r--r--src/library/scala/collection/generic/CanCombineFrom.scala9
-rwxr-xr-xsrc/library/scala/collection/generic/FilterMonadic.scala11
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala2
-rw-r--r--src/library/scala/collection/generic/GenericParCompanion.scala9
-rw-r--r--src/library/scala/collection/generic/GenericParTemplate.scala16
-rw-r--r--src/library/scala/collection/generic/GenericSetTemplate.scala2
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala2
-rw-r--r--src/library/scala/collection/generic/HasNewCombiner.scala9
-rw-r--r--src/library/scala/collection/generic/IterableForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/MapFactory.scala6
-rw-r--r--src/library/scala/collection/generic/ParFactory.scala11
-rw-r--r--src/library/scala/collection/generic/ParMapFactory.scala9
-rw-r--r--src/library/scala/collection/generic/ParSetFactory.scala9
-rw-r--r--src/library/scala/collection/generic/SeqFactory.scala2
-rw-r--r--src/library/scala/collection/generic/SeqForwarder.scala18
-rw-r--r--src/library/scala/collection/generic/SetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/Signalling.scala8
-rw-r--r--src/library/scala/collection/generic/Subtractable.scala2
-rw-r--r--src/library/scala/collection/generic/TraversableFactory.scala2
-rw-r--r--src/library/scala/collection/immutable/GenIterable.scala.disabled37
-rw-r--r--src/library/scala/collection/immutable/GenMap.scala.disabled36
-rw-r--r--src/library/scala/collection/immutable/GenSeq.scala.disabled49
-rw-r--r--src/library/scala/collection/immutable/GenSet.scala.disabled43
-rw-r--r--src/library/scala/collection/immutable/GenTraversable.scala.disabled41
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala47
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala40
-rw-r--r--src/library/scala/collection/immutable/Iterable.scala5
-rw-r--r--src/library/scala/collection/immutable/List.scala54
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala4
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala4
-rw-r--r--src/library/scala/collection/immutable/Map.scala3
-rw-r--r--src/library/scala/collection/immutable/MapLike.scala4
-rw-r--r--src/library/scala/collection/immutable/MapProxy.scala2
-rw-r--r--src/library/scala/collection/immutable/Queue.scala6
-rw-r--r--src/library/scala/collection/immutable/Range.scala2
-rw-r--r--src/library/scala/collection/immutable/RedBlack.scala9
-rw-r--r--src/library/scala/collection/immutable/Seq.scala6
-rw-r--r--src/library/scala/collection/immutable/Set.scala8
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala4
-rw-r--r--src/library/scala/collection/immutable/Stack.scala2
-rw-r--r--src/library/scala/collection/immutable/Stream.scala23
-rw-r--r--src/library/scala/collection/immutable/StreamViewLike.scala12
-rw-r--r--src/library/scala/collection/immutable/StringOps.scala2
-rw-r--r--src/library/scala/collection/immutable/Traversable.scala2
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala4
-rw-r--r--src/library/scala/collection/immutable/TrieIterator.scala219
-rw-r--r--src/library/scala/collection/immutable/TrieIteratorBase.scala184
-rw-r--r--src/library/scala/collection/immutable/Vector.scala10
-rw-r--r--src/library/scala/collection/interfaces/IterableMethods.scala6
-rw-r--r--src/library/scala/collection/interfaces/MapMethods.scala2
-rw-r--r--src/library/scala/collection/interfaces/SeqMethods.scala2
-rw-r--r--src/library/scala/collection/interfaces/TraversableMethods.scala4
-rw-r--r--src/library/scala/collection/mutable/AddingBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/ArrayLike.scala8
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala3
-rw-r--r--src/library/scala/collection/mutable/ArraySeq.scala2
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala2
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala10
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala6
-rw-r--r--src/library/scala/collection/mutable/Cloneable.scala1
-rw-r--r--src/library/scala/collection/mutable/GenIterable.scala.disabled37
-rw-r--r--src/library/scala/collection/mutable/GenMap.scala.disabled40
-rw-r--r--src/library/scala/collection/mutable/GenSeq.scala.disabled44
-rw-r--r--src/library/scala/collection/mutable/GenSet.scala.disabled46
-rw-r--r--src/library/scala/collection/mutable/GenTraversable.scala.disabled38
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala2
-rw-r--r--src/library/scala/collection/mutable/ImmutableMapAdaptor.scala2
-rw-r--r--src/library/scala/collection/mutable/ImmutableSetAdaptor.scala3
-rw-r--r--src/library/scala/collection/mutable/Iterable.scala5
-rw-r--r--src/library/scala/collection/mutable/Map.scala1
-rw-r--r--src/library/scala/collection/mutable/MapBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala10
-rw-r--r--src/library/scala/collection/mutable/MapProxy.scala2
-rw-r--r--src/library/scala/collection/mutable/MultiMap.scala2
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala2
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala14
-rw-r--r--src/library/scala/collection/mutable/PriorityQueueProxy.scala8
-rw-r--r--src/library/scala/collection/mutable/ResizableArray.scala2
-rw-r--r--src/library/scala/collection/mutable/Seq.scala7
-rw-r--r--src/library/scala/collection/mutable/SeqLike.scala3
-rw-r--r--src/library/scala/collection/mutable/Set.scala1
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala6
-rw-r--r--src/library/scala/collection/mutable/Stack.scala2
-rw-r--r--src/library/scala/collection/mutable/StackProxy.scala3
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala14
-rw-r--r--src/library/scala/collection/mutable/SynchronizedBuffer.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala8
-rw-r--r--src/library/scala/collection/mutable/SynchronizedSet.scala4
-rw-r--r--src/library/scala/collection/mutable/Traversable.scala2
-rw-r--r--src/library/scala/collection/parallel/Combiner.scala5
-rw-r--r--src/library/scala/collection/parallel/ParIterable.scala12
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala406
-rw-r--r--src/library/scala/collection/parallel/ParIterableView.scala9
-rw-r--r--src/library/scala/collection/parallel/ParIterableViewLike.scala80
-rw-r--r--src/library/scala/collection/parallel/ParMap.scala7
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala15
-rw-r--r--src/library/scala/collection/parallel/ParSeq.scala13
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala170
-rw-r--r--src/library/scala/collection/parallel/ParSeqView.scala6
-rw-r--r--src/library/scala/collection/parallel/ParSeqViewLike.scala79
-rw-r--r--src/library/scala/collection/parallel/ParSet.scala21
-rw-r--r--src/library/scala/collection/parallel/ParSetLike.scala46
-rw-r--r--src/library/scala/collection/parallel/PreciseSplitter.scala64
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala141
-rw-r--r--src/library/scala/collection/parallel/Splitter.scala51
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala33
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala49
-rw-r--r--src/library/scala/collection/parallel/immutable/ParIterable.scala5
-rw-r--r--src/library/scala/collection/parallel/immutable/ParMap.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSeq.scala10
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSet.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/ParVector.scala15
-rw-r--r--src/library/scala/collection/parallel/immutable/package.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/LazyCombiner.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala18
-rw-r--r--src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala14
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala13
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashTable.scala10
-rw-r--r--src/library/scala/collection/parallel/mutable/ParIterable.scala10
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMap.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMapLike.scala19
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSeq.scala11
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSet.scala9
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSetLike.scala19
-rw-r--r--src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala111
-rw-r--r--src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala106
-rw-r--r--src/library/scala/collection/parallel/package.scala16
-rw-r--r--src/library/scala/concurrent/MailBox.scala8
-rw-r--r--src/library/scala/concurrent/TIMEOUT.scala2
-rw-r--r--src/library/scala/concurrent/ops.scala2
-rw-r--r--src/library/scala/concurrent/pilib.scala2
-rw-r--r--src/library/scala/deprecated.scala6
-rw-r--r--src/library/scala/io/Position.scala7
-rw-r--r--src/library/scala/io/Source.scala2
-rw-r--r--src/library/scala/io/UTF8Codec.scala12
-rw-r--r--src/library/scala/math/BigDecimal.scala4
-rw-r--r--src/library/scala/math/BigInt.scala4
-rw-r--r--src/library/scala/package.scala88
-rw-r--r--src/library/scala/reflect/generic/ByteCodecs.scala4
-rw-r--r--src/library/scala/reflect/generic/HasFlags.scala3
-rw-r--r--src/library/scala/runtime/AnyValCompanion.scala2
-rw-r--r--src/library/scala/runtime/RichChar.scala8
-rw-r--r--src/library/scala/runtime/package.scala18
-rw-r--r--src/library/scala/sys/process/ProcessBuilder.scala6
-rw-r--r--src/library/scala/sys/process/ProcessBuilderImpl.scala9
-rw-r--r--src/library/scala/testing/SUnit.scala3
-rw-r--r--src/library/scala/util/MurmurHash.scala4
-rw-r--r--src/library/scala/util/Properties.scala39
-rw-r--r--src/library/scala/util/parsing/json/JSON.scala3
-rw-r--r--src/library/scala/util/parsing/syntax/package.scala6
-rw-r--r--src/library/scala/xml/HasKeyValue.scala2
-rw-r--r--src/library/scala/xml/XML.scala4
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Assembly.java6
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEFile.java23
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala14
-rw-r--r--test/disabled/jvm/JavaInteraction.check (renamed from test/files/jvm/JavaInteraction.check)0
-rw-r--r--test/disabled/jvm/JavaInteraction.scala (renamed from test/files/jvm/JavaInteraction.scala)0
-rw-r--r--test/disabled/presentation/simple-tests.check (renamed from test/files/presentation/simple-tests.check)0
-rw-r--r--test/disabled/presentation/simple-tests.javaopts (renamed from test/files/presentation/simple-tests.javaopts)0
-rw-r--r--test/disabled/presentation/simple-tests.opts (renamed from test/files/presentation/simple-tests.opts)0
-rw-r--r--test/disabled/scalacheck/HashTrieSplit.scala47
-rw-r--r--test/files/jvm/ticket4283/AbstractFoo.java5
-rw-r--r--test/files/jvm/ticket4283/ScalaBipp.scala5
-rw-r--r--test/files/jvm/ticket4283/Test.scala4
-rwxr-xr-xtest/files/neg/t3115.scala2
-rw-r--r--test/files/neg/t3774.check4
-rw-r--r--test/files/pos/bug1071.scala17
-rw-r--r--test/files/pos/bug4275.scala13
-rw-r--r--test/files/pos/spec-List.scala2
-rw-r--r--test/files/pos/t2799.scala2
-rw-r--r--test/files/pos/t4402/A.scala3
-rw-r--r--test/files/pos/t4402/Bar.java7
-rw-r--r--test/files/pos/t4402/Foo.java8
-rw-r--r--test/files/pos/t4432.scala42
-rw-r--r--test/files/run/pc-conversions.scala13
-rw-r--r--test/files/run/t4426.scala24
-rw-r--r--test/files/run/t4459.scala12
-rw-r--r--test/files/scalacheck/HashTrieSplit.scala4
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala6
-rw-r--r--test/files/scalacheck/parallel-collections/pc.scala10
-rw-r--r--test/pending/run/t4283/AbstractFoo.java6
-rw-r--r--test/pending/run/t4283/IllegalAccess.scala17
-rw-r--r--test/pending/run/t4283/ScalaBipp.scala5
-rw-r--r--test/scaladoc/resources/Trac4452.scala30
-rw-r--r--test/scaladoc/resources/Trac4471.scala9
-rw-r--r--test/scaladoc/scala/html/HtmlFactoryTest.scala40
-rwxr-xr-xtools/deploy-local-maven-snapshot11
361 files changed, 5926 insertions, 3478 deletions
diff --git a/build.xml b/build.xml
index 1943a36089..b94fccdc08 100644
--- a/build.xml
+++ b/build.xml
@@ -375,7 +375,7 @@ LOCAL REFERENCE BUILD (LOCKER)
<include name="**/*.properties"/>
<include name="**/*.swf"/>
<include name="**/*.png"/>
-
+ <include name="**/*.gif"/>
</fileset>
</copy>
<touch file="${build-locker.dir}/compiler.complete" verbose="no"/>
@@ -590,6 +590,7 @@ QUICK BUILD (QUICK)
<include name="**/*.properties"/>
<include name="**/*.swf"/>
<include name="**/*.png"/>
+ <include name="**/*.gif"/>
</fileset>
</copy>
<touch file="${build-quick.dir}/compiler.complete" verbose="no"/>
@@ -1101,6 +1102,7 @@ BOOTSTRAPPING BUILD (STRAP)
<include name="**/*.properties"/>
<include name="**/*.swf"/>
<include name="**/*.png"/>
+ <include name="**/*.gif"/>
</fileset>
</copy>
<touch file="${build-strap.dir}/compiler.complete" verbose="no"/>
diff --git a/lib/jline.jar.desired.sha1 b/lib/jline.jar.desired.sha1
index ba6e62ff61..a0f97c4d7a 100644
--- a/lib/jline.jar.desired.sha1
+++ b/lib/jline.jar.desired.sha1
@@ -1 +1 @@
-32f4eb98f8135b0d565e4e7aa72550f34fc4c133 ?jline.jar
+115e0f58e2ffa089c083c466b0161216dd4c916e ?jline.jar
diff --git a/lib/msil.jar.desired.sha1 b/lib/msil.jar.desired.sha1
index 6a95abf8e6..7dd6b5d66b 100644
--- a/lib/msil.jar.desired.sha1
+++ b/lib/msil.jar.desired.sha1
@@ -1 +1 @@
-411bfee5f3b2b6bae5f6ac06f84b4f4624370492 ?msil.jar
+58f64cd00399c724e7d526e5bdcbce3e2b79f78b ?msil.jar
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index 036af075b1..d2eb0e93e3 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-4b2f54712dab89071f1909f07367673635beffda ?scala-compiler.jar
+e0382dfa2712e567b49f5826ac50eae59cf03cb3 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index aa7dbdc6bd..1b5b949434 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-f015b1389419b997796a3a9814bedac6343f108e ?scala-library-src.jar
+5ad90238f4fc57e8147fb69496c68d02ca073ca0 ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 57adbeffc5..b0aa256cb4 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-af0438e6911f357a2af0d068482baa1f526dbac0 ?scala-library.jar
+678ada3690dd149c2bd302407e315a74de0bfd42 ?scala-library.jar
diff --git a/src/actors/scala/actors/Debug.scala b/src/actors/scala/actors/Debug.scala
index 17af47310d..30d5ae5703 100644
--- a/src/actors/scala/actors/Debug.scala
+++ b/src/actors/scala/actors/Debug.scala
@@ -43,5 +43,5 @@ private[actors] class Logger(tag: String) {
if (lev > 0) b
}
-@deprecated("this class is going to be removed in a future release")
+@deprecated("this class is going to be removed in a future release", "2.7.7")
class Debug(tag: String) extends Logger(tag) {}
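
[Editor's note] Most of the @deprecated hunks in this commit follow the pattern above: the single-argument annotation gains a second "since" argument recording the release in which the symbol was deprecated. A minimal standalone sketch of the two-argument form (hypothetical names, not part of the patch):

    object DeprecationSketch {
      // the second argument documents the release in which the member was deprecated
      @deprecated("use newApi instead", "2.9.0")
      def oldApi(): Int = newApi()

      def newApi(): Int = 42

      def main(args: Array[String]): Unit =
        println(oldApi())   // compiles with a deprecation warning and prints 42
    }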
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index 20e2de47a5..c6b575d0ee 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -27,12 +27,12 @@ abstract class Future[+T] extends Responder[T] with Function0[T] {
private[actors] var fvalue: Option[Any] = None
private[actors] def fvalueTyped = fvalue.get.asInstanceOf[T]
- @deprecated("this member is going to be removed in a future release")
+ @deprecated("this member is going to be removed in a future release", "2.8.0")
def ch: InputChannel[Any] = inputChannel
- @deprecated("this member is going to be removed in a future release")
+ @deprecated("this member is going to be removed in a future release", "2.8.0")
protected def value: Option[Any] = fvalue
- @deprecated("this member is going to be removed in a future release")
+ @deprecated("this member is going to be removed in a future release", "2.8.0")
protected def value_=(x: Option[Any]) { fvalue = x }
/** Tests whether the future's result is available.
diff --git a/src/actors/scala/actors/IScheduler.scala b/src/actors/scala/actors/IScheduler.scala
index 5e0b6c0d0c..865ead58a0 100644
--- a/src/actors/scala/actors/IScheduler.scala
+++ b/src/actors/scala/actors/IScheduler.scala
@@ -67,16 +67,16 @@ trait IScheduler {
def managedBlock(blocker: scala.concurrent.ManagedBlocker): Unit
- @deprecated("this member is going to be removed in a future release")
+ @deprecated("this member is going to be removed in a future release", "2.7.7")
def tick(a: Actor) {}
- @deprecated("this member is going to be removed in a future release")
+ @deprecated("this member is going to be removed in a future release", "2.7.7")
def onLockup(handler: () => Unit) {}
- @deprecated("this member is going to be removed in a future release")
+ @deprecated("this member is going to be removed in a future release", "2.7.7")
def onLockup(millis: Int)(handler: () => Unit) {}
- @deprecated("this member is going to be removed in a future release")
+ @deprecated("this member is going to be removed in a future release", "2.7.7")
def printActorDump {}
}
diff --git a/src/actors/scala/actors/MessageQueue.scala b/src/actors/scala/actors/MessageQueue.scala
index 0e5e62b1b5..777735df23 100644
--- a/src/actors/scala/actors/MessageQueue.scala
+++ b/src/actors/scala/actors/MessageQueue.scala
@@ -16,7 +16,7 @@ package scala.actors
* @author Philipp Haller
*/
@SerialVersionUID(7124278808020037465L)
-@deprecated("this class is going to be removed in a future release")
+@deprecated("this class is going to be removed in a future release", "2.7.7")
class MessageQueueElement(msg: Any, session: OutputChannel[Any], next: MessageQueueElement) extends MQueueElement[Any](msg, session, next) with Serializable {
def this() = this(null, null, null)
def this(msg: Any, session: OutputChannel[Any]) = this(msg, session, null)
@@ -36,7 +36,7 @@ private[actors] class MQueueElement[Msg >: Null](val msg: Msg, val session: Outp
* @author Philipp Haller
*/
@SerialVersionUID(2168935872884095767L)
-@deprecated("this class is going to be removed in a future release")
+@deprecated("this class is going to be removed in a future release", "2.7.7")
class MessageQueue(label: String) extends MQueue[Any](label) with Serializable
private[actors] class MQueue[Msg >: Null](protected val label: String) {
diff --git a/src/actors/scala/actors/Reaction.scala b/src/actors/scala/actors/Reaction.scala
index 6b8efc22fe..e94d42005d 100644
--- a/src/actors/scala/actors/Reaction.scala
+++ b/src/actors/scala/actors/Reaction.scala
@@ -24,7 +24,7 @@ private[actors] class KillActorControl extends ControlThrowable
*
* @author Philipp Haller
*/
-@deprecated("This class will be removed in a future release")
+@deprecated("This class will be removed in a future release", "2.7.7")
class Reaction(a: Actor, f: PartialFunction[Any, Any], msg: Any)
extends ActorTask(a, if (f == null) (() => a.act()) else null, f, msg) {
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
index 8d0cac44ea..3bf1b33abf 100644
--- a/src/actors/scala/actors/Scheduler.scala
+++ b/src/actors/scala/actors/Scheduler.scala
@@ -40,7 +40,7 @@ object Scheduler extends DelegatingScheduler {
/* Only <code>ForkJoinScheduler</code> implements this method.
*/
- @deprecated("snapshot will be removed")
+ @deprecated("snapshot will be removed", "2.8.0")
def snapshot() {
if (sched.isInstanceOf[ForkJoinScheduler]) {
sched.asInstanceOf[ForkJoinScheduler].snapshot()
diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala
index 55bc8e113f..1fd9fd7699 100644
--- a/src/actors/scala/actors/package.scala
+++ b/src/actors/scala/actors/package.scala
@@ -20,26 +20,26 @@ package object actors {
// type of Reactors tracked by termination detector
private[actors] type TrackedReactor = Reactor[A] forSome { type A >: Null }
- @deprecated("use scheduler.ForkJoinScheduler instead")
+ @deprecated("use scheduler.ForkJoinScheduler instead", "2.8.0")
type FJTaskScheduler2 = scala.actors.scheduler.ForkJoinScheduler
- @deprecated("use scheduler.ForkJoinScheduler instead")
+ @deprecated("use scheduler.ForkJoinScheduler instead", "2.8.0")
type TickedScheduler = scala.actors.scheduler.ForkJoinScheduler
- @deprecated("use scheduler.ForkJoinScheduler instead")
+ @deprecated("use scheduler.ForkJoinScheduler instead", "2.8.0")
type WorkerThreadScheduler = scala.actors.scheduler.ForkJoinScheduler
- @deprecated("this class is going to be removed in a future release")
+ @deprecated("this class is going to be removed in a future release", "2.8.0")
type WorkerThread = java.lang.Thread
- @deprecated("use scheduler.SingleThreadedScheduler instead")
+ @deprecated("use scheduler.SingleThreadedScheduler instead", "2.8.0")
type SingleThreadedScheduler = scala.actors.scheduler.SingleThreadedScheduler
// This used to do a blind cast and throw a CCE after the package
// object was loaded. I have replaced with a variation that should work
// in whatever cases that was working but fail less exceptionally for
// those not intentionally using it.
- @deprecated("this value is going to be removed in a future release")
+ @deprecated("this value is going to be removed in a future release", "2.8.0")
val ActorGC = scala.actors.Scheduler.impl match {
case x: scala.actors.scheduler.ActorGC => x
case _ => null
diff --git a/src/actors/scala/actors/remote/RemoteActor.scala b/src/actors/scala/actors/remote/RemoteActor.scala
index df2d8610b1..f6ef62bc10 100644
--- a/src/actors/scala/actors/remote/RemoteActor.scala
+++ b/src/actors/scala/actors/remote/RemoteActor.scala
@@ -81,7 +81,7 @@ object RemoteActor {
kern
}
- @deprecated("this member is going to be removed in a future release")
+ @deprecated("this member is going to be removed in a future release", "2.8.0")
def createKernelOnPort(port: Int): NetKernel =
createNetKernelOnPort(port)
@@ -121,7 +121,7 @@ object RemoteActor {
private[remote] def someNetKernel: NetKernel =
kernels.valuesIterator.next
- @deprecated("this member is going to be removed in a future release")
+ @deprecated("this member is going to be removed in a future release", "2.8.0")
def someKernel: NetKernel =
someNetKernel
}
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
index 5556afc802..b310478457 100644
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ b/src/actors/scala/actors/remote/TcpService.scala
@@ -99,16 +99,17 @@ class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
// we are not connected, yet
try {
val newWorker = connect(node)
- newWorker transmit data
// any pending sends?
pendingSends.get(node) match {
case None =>
// do nothing
case Some(msgs) =>
- msgs foreach {newWorker transmit _}
+ msgs.reverse foreach {newWorker transmit _}
pendingSends -= node
}
+
+ newWorker transmit data
} catch {
case uhe: UnknownHostException =>
bufferMsg(uhe)
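
[Editor's note] The reordering above makes a reconnecting worker drain the buffered backlog before the message that triggered the reconnect, and the added .reverse suggests the backlog is accumulated by prepending, so reversing restores arrival (FIFO) order. A hedged sketch of that ordering, with hypothetical names:

    object PendingSendSketch {
      def main(args: Array[String]): Unit = {
        // messages buffered while the connection was down, newest prepended first
        var pending = List.empty[String]
        pending ::= "msg1"
        pending ::= "msg2"                     // pending is now List(msg2, msg1)

        val current = "msg3"                   // the send that triggered the reconnect
        val sendOrder = pending.reverse :+ current
        println(sendOrder)                     // List(msg1, msg2, msg3)
      }
    }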
diff --git a/src/actors/scala/actors/scheduler/TerminationMonitor.scala b/src/actors/scala/actors/scheduler/TerminationMonitor.scala
index c7e4cdda37..e4b9743ab5 100644
--- a/src/actors/scala/actors/scheduler/TerminationMonitor.scala
+++ b/src/actors/scala/actors/scheduler/TerminationMonitor.scala
@@ -66,7 +66,7 @@ private[scheduler] trait TerminationMonitor {
}
/** Deprecated non-actor-private version */
- @deprecated("this method is going to be removed in a future release")
+ @deprecated("this method is going to be removed in a future release", "2.7.7")
def allTerminated: Boolean = allActorsTerminated
/** Checks for actors that have become garbage. */
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index d587d0464d..315af55d41 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -238,7 +238,7 @@ class Function(val i: Int) extends Group("Function") with Arity {
curryComment +
" def curried: %s => R = {\n %s\n }\n".format(
targs mkString " => ", body
- ) + """ @deprecated("Use 'curried' instead")""" + "\n def curry = curried\n"
+ ) + """ @deprecated("Use 'curried' instead", "2.8.0")""" + "\n def curry = curried\n"
}
override def moreMethods = curryMethod + tupleMethod
@@ -280,7 +280,7 @@ object TupleTwo extends Tuple(2)
*/
def swap: Tuple2[T2,T1] = Tuple2(_2, _1)
- @deprecated("Use `zipped` instead.")
+ @deprecated("Use `zipped` instead.", "2.9.0")
def zip[Repr1, El1, El2, To](implicit w1: T1 => TLike[El1, Repr1],
w2: T2 => Iterable[El2],
cbf1: CBF[Repr1, (El1, El2), To]): To = {
@@ -387,7 +387,7 @@ object TupleThree extends Tuple(3) {
override def imports = Tuple.zipImports
override def moreMethods = """
- @deprecated("Use `zipped` instead.")
+ @deprecated("Use `zipped` instead.", "2.9.0")
def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
w2: T2 => Iterable[El2],
w3: T3 => Iterable[El3],
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index a1280eec8e..9f8e488c43 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -206,7 +206,7 @@ final val NaN = @boxed@.NaN
final val PositiveInfinity = @boxed@.POSITIVE_INFINITY
final val NegativeInfinity = @boxed@.NEGATIVE_INFINITY
-@deprecated("use @name@.MinPositiveValue instead")
+@deprecated("use @name@.MinPositiveValue instead", "2.9.0")
final val Epsilon = MinPositiveValue
/** The negative number with the greatest (finite) absolute value which is representable
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 1e2a367176..a0c39f71fb 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -95,7 +95,12 @@ class CompileSocket extends CompileOutputCommon {
private def startNewServer(vmArgs: String) = {
val cmd = serverCommand(vmArgs split " " toSeq)
info("[Executing command: %s]" format cmd.mkString(" "))
- cmd.daemonized().run()
+
+ // Hiding inadequate daemonized implementation from public API for now
+ Process(cmd) match {
+ case x: ProcessBuilder.AbstractBuilder => x.daemonized().run()
+ case x => x.run()
+ }
}
/** The port identification file */
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 2e97a3d820..9df0069ada 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -328,13 +328,18 @@ trait DocComments { self: SymbolTable =>
pos withStart start1 withPoint start1 withEnd end1
}
- def defineVariables(sym: Symbol) {
- for (str <- defines) {
- val start = skipWhitespace(str, "@define".length)
- var idx = skipVariable(str, start)
- val vble = variableName(str.substring(start, idx))
-
- defs(sym) += vble -> (str drop idx).trim.replaceAll("""\s+\*+$""", "")
+ def defineVariables(sym: Symbol) = {
+ val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r
+
+ defs(sym) ++= defines.map {
+ str => {
+ val start = skipWhitespace(str, "@define".length)
+ val (key, value) = str.splitAt(skipVariable(str, start))
+ key.drop(start) -> value
+ }
+ } map {
+ case (key, Trim(value)) =>
+ variableName(key) -> value.replaceAll("\\s+\\*+$", "")
}
}
}
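
[Editor's note] The rewritten defineVariables pairs String.splitAt with a regex used as an extractor, so the captured value comes back with surrounding whitespace already stripped. A simplified, standalone sketch of that extractor idiom (the original Trim additionally excludes line breaks from the leading whitespace via a character-class intersection):

    object TrimSketch {
      // whole-string match: group 1 is the input without leading or trailing whitespace
      val Trim = """(?s)^\s*(.*?)\s*$""".r

      def main(args: Array[String]): Unit =
        "  someKey   some value  " match {
          case Trim(v) => println("[" + v + "]")   // prints [someKey   some value]
        }
    }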
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 249aefb10e..540c5f50dd 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -991,6 +991,7 @@ abstract class GenICode extends SubComponent {
case Select(qualifier, selector) =>
val sym = tree.symbol
generatedType = toTypeKind(sym.info)
+ val hostClass = qualifier.tpe.typeSymbol.orElse(sym.owner)
if (sym.isModule) {
if (settings.debug.value)
@@ -999,11 +1000,11 @@ abstract class GenICode extends SubComponent {
genLoadModule(ctx, sym, tree.pos)
ctx
} else if (sym.isStaticMember) {
- ctx.bb.emit(LOAD_FIELD(sym, true), tree.pos)
+ ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
ctx
} else {
val ctx1 = genLoadQualifier(tree, ctx)
- ctx1.bb.emit(LOAD_FIELD(sym, false), tree.pos)
+ ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
ctx1
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index a46de06d6f..ba33c425f2 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -171,6 +171,11 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = if (isStatic) Nil else List(REFERENCE(field.owner));
override def producedTypes = List(toTypeKind(field.tpe));
+
+ // more precise information about how to load this field
+ // see #4283
+ var hostClass: Symbol = field.owner
+ def setHostClass(cls: Symbol): this.type = { hostClass = cls; this }
}
case class LOAD_MODULE(module: Symbol) extends Instruction {
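
[Editor's note] LOAD_FIELD keeps its case-class shape but gains mutable hostClass metadata with a setter returning this.type, which is why GenICode above can write LOAD_FIELD(sym, static) setHostClass hostClass inline and GenJVM below can read lf.hostClass. A standalone sketch of that chainable-setter idiom, with hypothetical names:

    class Instr {
      var hostClass: String = "owner"                       // defaults to the field's owner
      def setHostClass(cls: String): this.type = { hostClass = cls; this }
    }

    object InstrSketch {
      def main(args: Array[String]): Unit = {
        val i = (new Instr).setHostClass("qualifier.Type")  // returns the same Instr, so the call chains
        println(i.hostClass)                                // prints qualifier.Type
      }
    }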
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 2305e22ed2..ac0cb97d45 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -1172,8 +1172,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid {
case LOAD_LOCAL(local) =>
jcode.emitLOAD(indexOf(local), javaType(local.kind))
- case LOAD_FIELD(field, isStatic) =>
- var owner = javaName(field.owner)
+ case lf @ LOAD_FIELD(field, isStatic) =>
+ var owner = javaName(lf.hostClass)
if (settings.debug.value)
log("LOAD_FIELD with owner: " + owner +
" flags: " + Flags.flagsToString(field.owner.flags))
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index a38916b8e4..d593a13d8b 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -108,14 +108,14 @@ abstract class GenMSIL extends SubComponent {
val objParam = Array(MOBJECT)
-// val toBool: MethodInfo = SystemConvert.GetMethod("ToBoolean", objParam)
- val toSByte: MethodInfo = SystemConvert.GetMethod("ToSByte", objParam)
- val toShort: MethodInfo = SystemConvert.GetMethod("ToInt16", objParam)
- val toChar: MethodInfo = SystemConvert.GetMethod("ToChar", objParam)
- val toInt: MethodInfo = SystemConvert.GetMethod("ToInt32", objParam)
- val toLong: MethodInfo = SystemConvert.GetMethod("ToInt64", objParam)
- val toFloat: MethodInfo = SystemConvert.GetMethod("ToSingle", objParam)
- val toDouble: MethodInfo = SystemConvert.GetMethod("ToDouble", objParam)
+ val toBool: MethodInfo = SystemConvert.GetMethod("ToBoolean", objParam) // see comment in emitUnbox
+ val toSByte: MethodInfo = SystemConvert.GetMethod("ToSByte", objParam)
+ val toShort: MethodInfo = SystemConvert.GetMethod("ToInt16", objParam)
+ val toChar: MethodInfo = SystemConvert.GetMethod("ToChar", objParam)
+ val toInt: MethodInfo = SystemConvert.GetMethod("ToInt32", objParam)
+ val toLong: MethodInfo = SystemConvert.GetMethod("ToInt64", objParam)
+ val toFloat: MethodInfo = SystemConvert.GetMethod("ToSingle", objParam)
+ val toDouble: MethodInfo = SystemConvert.GetMethod("ToDouble", objParam)
//val boxedUnit: FieldInfo = msilType(definitions.BoxedUnitModule.info).GetField("UNIT")
val boxedUnit: FieldInfo = fields(definitions.BoxedUnit_UNIT)
@@ -460,9 +460,8 @@ abstract class GenMSIL extends SubComponent {
}
private[GenMSIL] def ilasmFileName(iclass: IClass) : String = {
- val singleBackslashed = iclass.cunit.source.file.toString
- val doubleBackslashed = singleBackslashed.replace("\\", "\\\\")
- doubleBackslashed
+ // method.sourceFile contains just the filename
+ iclass.cunit.source.file.toString.replace("\\", "\\\\")
}
private[GenMSIL] def genClass(iclass: IClass) {
@@ -564,6 +563,10 @@ abstract class GenMSIL extends SubComponent {
val labels: HashMap[BasicBlock, Label] = new HashMap()
+ /* when emitting .line, it's enough to include the full filename just once per method, thus reducing filesize.
+ * this scheme relies on the fact that the entry block is emitted first. */
+ var dbFilenameSeen = false
+
def genCode(m: IMethod) {
def makeLabels(blocks: List[BasicBlock]) = {
@@ -587,6 +590,7 @@ abstract class GenMSIL extends SubComponent {
// debug val MButNotL = (blocksInM.toSet) diff (blocksInL.toSet) // if non-empty, a jump to B fails to find a label for B (case CJUMP, case CZJUMP)
// debug if(!MButNotL.isEmpty) { }
+ dbFilenameSeen = false
genBlocks(linearization)
// RETURN inside exception blocks are replaced by Leave. The target of the
@@ -897,6 +901,7 @@ abstract class GenMSIL extends SubComponent {
var previousWasNEW = false
var lastLineNr: Int = 0
+ var lastPos: Position = NoPosition
// EndExceptionBlock must happen before MarkLabel because it adds the
@@ -932,18 +937,24 @@ abstract class GenMSIL extends SubComponent {
}
for (instr <- block) {
- val currentLineNr = try {
- instr.pos.line
- } catch {
- case _: UnsupportedOperationException =>
- log("Warning: wrong position in: " + method)
- lastLineNr
- }
-
- if (currentLineNr != lastLineNr) {
- mcode.setPosition(currentLineNr, ilasmFileName(clasz)) // method.sourceFile contains just the filename
- lastLineNr = currentLineNr
- }
+ try {
+ val currentLineNr = instr.pos.line
+ val skip = if(instr.pos.isRange) instr.pos.sameRange(lastPos) else (currentLineNr == lastLineNr);
+ if(!skip || !dbFilenameSeen) {
+ val fileName = if(dbFilenameSeen) "" else {dbFilenameSeen = true; ilasmFileName(clasz)};
+ if(instr.pos.isRange) {
+ val startLine = instr.pos.focusStart.line
+ val endLine = instr.pos.focusEnd.line
+ val startCol = instr.pos.focusStart.column
+ val endCol = instr.pos.focusEnd.column
+ mcode.setPosition(startLine, endLine, startCol, endCol, fileName)
+ } else {
+ mcode.setPosition(instr.pos.line, fileName)
+ }
+ lastLineNr = currentLineNr
+ lastPos = instr.pos
+ }
+ } catch { case _: UnsupportedOperationException => () }
if (previousWasNEW)
assert(instr.isInstanceOf[DUP], block)
@@ -2194,7 +2205,12 @@ abstract class GenMSIL extends SubComponent {
def emitUnbox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match {
case UNIT => code.Emit(OpCodes.Pop)
- case BOOL => code.Emit(OpCodes.Unbox, MBOOL); code.Emit(OpCodes.Ldind_I1)
+ /* (1) it's essential to keep the code emitted here (as of now plain calls to System.Convert.ToBlaBla methods)
+ behaviorally.equiv.wrt. BoxesRunTime.unboxToBlaBla methods
+ (case null: that's easy, case boxed: track changes to unboxBlaBla)
+ (2) See also: asInstanceOf to cast from Any to number,
+ tracked in http://lampsvn.epfl.ch/trac/scala/ticket/4437 */
+ case BOOL => code.Emit(OpCodes.Call, toBool)
case BYTE => code.Emit(OpCodes.Call, toSByte)
case SHORT => code.Emit(OpCodes.Call, toShort)
case CHAR => code.Emit(OpCodes.Call, toChar)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index aeea6b5452..70e9f78974 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -534,7 +534,7 @@ abstract class Inliners extends SubComponent {
if (settings.debug.value)
log("Making not-private symbol out of synthetic: " + f)
- f setFlag Flags.notPRIVATE
+ if (f hasFlag Flags.PRIVATE) f setFlag Flags.notPRIVATE
true
}
diff --git a/src/compiler/scala/tools/nsc/doc/Index.scala b/src/compiler/scala/tools/nsc/doc/Index.scala
index e845f8e909..ce44291ade 100644
--- a/src/compiler/scala/tools/nsc/doc/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/Index.scala
@@ -5,8 +5,8 @@ import scala.collection._
trait Index {
- type SymbolMap = SortedMap[String, SortedSet[model.TemplateEntity]]
+ type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]]
def firstLetterIndex: Map[Char, SymbolMap]
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index f6dc58617e..8d260c2889 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -45,11 +45,11 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
copyResource("lib/scheduler.js")
copyResource("lib/index.js")
copyResource("lib/template.js")
+
copyResource("lib/index.css")
copyResource("lib/ref-index.css")
copyResource("lib/template.css")
- copyResource("lib/arrow-down.png")
- copyResource("lib/arrow-right.png")
+
copyResource("lib/class.png")
copyResource("lib/class_big.png")
copyResource("lib/object.png")
@@ -58,9 +58,38 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
copyResource("lib/trait_big.png")
copyResource("lib/package.png")
copyResource("lib/package_big.png")
+
+ copyResource("lib/arrow-down.png")
+ copyResource("lib/arrow-right.png")
copyResource("lib/filter_box_left.png")
copyResource("lib/filter_box_right.png")
+ copyResource("lib/filter_box_left2.gif")
+ copyResource("lib/filterbg.gif")
+ copyResource("lib/filterboxbarbg.gif")
+ copyResource("lib/filterboxbg.gif")
+
+ copyResource("lib/constructorsbg.gif")
+ copyResource("lib/defbg-blue.gif")
+ copyResource("lib/defbg-green.gif")
+ copyResource("lib/fullcommenttopbg.gif")
+ copyResource("lib/ownderbg2.gif")
+ copyResource("lib/ownerbg.gif")
+ copyResource("lib/ownerbg2.gif")
+ copyResource("lib/signaturebg.gif")
+ copyResource("lib/signaturebg2.gif")
+ copyResource("lib/packagesbg.gif")
+ copyResource("lib/typebg.gif")
+ copyResource("lib/valuemembersbg.gif")
+ copyResource("lib/filterboxbarbg.png")
+
copyResource("lib/remove.png")
+ copyResource("lib/navigation-li-a.png")
+ copyResource("lib/navigation-li.png")
+ copyResource("lib/selected-right.png")
+ copyResource("lib/selected.png")
+ copyResource("lib/selected2-right.png")
+ copyResource("lib/selected2.png")
+ copyResource("lib/unselected.png")
new page.Index(universe, index) writeFor this
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 4295151952..4fc10b2ec2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -37,10 +37,10 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
val body =
<body>
<div id="library">
- <img class='class icon' width="13" height="13" src={ relativeLinkTo{List("class.png", "lib")} }/>
- <img class='trait icon' width="13" height="13" src={ relativeLinkTo{List("trait.png", "lib")} }/>
- <img class='object icon' width="13" height="13" src={ relativeLinkTo{List("object.png", "lib")} }/>
- <img class='package icon' width="13" height="13" src={ relativeLinkTo{List("package.png", "lib")} }/>
+ <img class='class icon' src={ relativeLinkTo{List("class.png", "lib")} }/>
+ <img class='trait icon' src={ relativeLinkTo{List("trait.png", "lib")} }/>
+ <img class='object icon' src={ relativeLinkTo{List("object.png", "lib")} }/>
+ <img class='package icon' src={ relativeLinkTo{List("package.png", "lib")} }/>
</div>
{ browser }
<div id="content" class="ui-layout-center">
@@ -66,21 +66,13 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
def browser =
<xml:group>
<div id="browser" class="ui-layout-west">
- <div class="ui-west-north">{
- <div class="letters">
- { for(l <- index.firstLetterIndex.keySet.toList.sortBy( _.toString )) yield { // TODO there should be a better way to do that
- val ch = if(l=='#') "%23" else l // url encoding if needed
- <a target="template" href={"index/index-"+ch+".html"}>{l.toUpper}</a> ++ xml.Text(" ")
- } }
- </div>
- }</div>
<div class="ui-west-center">
<div id="filter"></div>
<div class="pack" id="tpl">{
def packageElem(pack: model.Package): NodeSeq = {
<xml:group>
{ if (!pack.isRootPackage)
- <h3><a class="tplshow" href={ relativeLinkTo(pack) } target="template">{ pack.qualifiedName }</a></h3>
+ <a class="tplshow" href={ relativeLinkTo(pack) } target="template">{ pack.qualifiedName }</a>
else NodeSeq.Empty
}
<ol class="templates">{
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
index 25e166a782..3e57f260c3 100755
--- a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package doc
package html
package page
-
+import doc.model._
class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends HtmlPage {
@@ -25,18 +25,34 @@ class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends
<script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
</xml:group>
+
+ private def entry(name: String, methods: Iterable[MemberEntity]) = {
+ val occurrences = methods.map(method => {
+ val html = templateToHtml(method.inDefinitionTemplates.head)
+ if (method.deprecation.isDefined) {
+ <strike>{ html }</strike>
+ } else {
+ html
+ }
+ })
+
+ <div class="entry">
+ <div class="name">{
+ if (methods.find { ! _.deprecation.isDefined } != None)
+ name
+ else
+ <strike>{ name }</strike>
+ }</div>
+ <div class="occurrences">{
+ for (owner <- occurrences) yield owner ++ xml.Text(" ")
+ }</div>
+ </div>
+ }
+
def body =
- <body>
- { for(groups <- index.firstLetterIndex(letter)) yield {
- <div class="entry">
- <div class="name">{ groups._1 }</div>
- <div class="occurrences">
- { for(owner <- groups._2.view) yield {
- templateToHtml(owner) ++ xml.Text(" ")
- } }
- </div>
- </div>
- } }
- </body>
+ <body>{
+ for(groups <- index.firstLetterIndex(letter)) yield
+ entry(groups._1, groups._2.view)
+ }</body>
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index cd733ced9a..f3e197d0be 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -79,20 +79,23 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
{ if (tpl.linearizationTemplates.isEmpty) NodeSeq.Empty else
<div id="order">
<span class="filtertype">Ordering</span>
- <ol><li class="alpha in">Alphabetic</li><li class="inherit out">By inheritance</li></ol>
+ <ol><li class="alpha in"><span>Alphabetic</span></li><li class="inherit out"><span>By inheritance</span></li></ol>
</div>
}
{ if (tpl.linearizationTemplates.isEmpty) NodeSeq.Empty else
<div id="ancestors">
<span class="filtertype">Inherited</span>
- <ol><li class="hideall">Hide All</li><li class="showall">Show all</li></ol>
- <ol id="linearization">{ (tpl :: tpl.linearizationTemplates) map { wte => <li class="in" name={ wte.qualifiedName }>{ wte.name }</li> } }</ol>
+ <ol><li class="hideall out"><span>Hide All</span></li>
+ <li class="showall in"><span>Show all</span></li></ol>
+ <ol id="linearization">{
+ (tpl :: tpl.linearizationTemplates) map { wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li> }
+ }</ol>
</div>
}
{
<div id="visbl">
<span class="filtertype">Visibility</span>
- <ol><li class="public in">Public</li><li class="all out">All</li></ol>
+ <ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
</div>
}
</div>
@@ -197,7 +200,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case dte: DocTemplateEntity if isSelf =>
// comment of class itself
<xml:group>
- <div id="comment" class="fullcomment">{ memberToCommentBodyHtml(mbr, isSelf = true) }</div>
+ <div id="comment" class="fullcommenttop">{ memberToCommentBodyHtml(mbr, isSelf = true) }</div>
</xml:group>
case dte: DocTemplateEntity if mbr.comment.isDefined =>
// comment of inner, documented class (only short comment, full comment is on the class' own page)
@@ -317,18 +320,28 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case _ => NodeSeq.Empty
}
- val annotations: Seq[scala.xml.Node] =
+ val annotations: Seq[scala.xml.Node] = {
+ // A list of annotations which don't show their arguments, e. g. because they are shown separately.
+ val annotationsWithHiddenArguments = List("deprecated", "Deprecated")
+
+ def showArguments(annotation: Annotation) = {
+ if (annotationsWithHiddenArguments.contains(annotation.qualifiedName)) false else true
+ }
+
if (!mbr.annotations.isEmpty) {
<dt>Annotations</dt>
<dd>{
mbr.annotations.map { annot =>
<xml:group>
- <span class="name">@{ templateToHtml(annot.annotationClass) }</span>{ argumentsToHtml(annot.arguments) }
+ <span class="name">@{ templateToHtml(annot.annotationClass) }</span>{
+ if (showArguments(annot)) argumentsToHtml(annot.arguments) else NodeSeq.Empty
+ }
</xml:group>
}
}
</dd>
} else NodeSeq.Empty
+ }
val sourceLink: Seq[scala.xml.Node] = mbr match {
case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) =>
@@ -395,28 +408,22 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
val linearization = mbr match {
case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.linearizationTemplates.nonEmpty =>
- <div class="toggleContainer">
- <div class="attributes block">
- <span class="link showElement">Linear Supertypes</span>
- <span class="link hideElement">Linear Supertypes</span>
- </div>
- <div class="superTypes hiddenContent">
- <p>{ typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = xml.Text(", ")) }</p>
- </div>
+ <div class="toggleContainer block">
+ <span class="toggle">Linear Supertypes</span>
+ <div class="superTypes hiddenContent">{
+ typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = xml.Text(", "))
+ }</div>
</div>
case _ => NodeSeq.Empty
}
val subclasses = mbr match {
case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.subClasses.nonEmpty =>
- <div class="toggleContainer">
- <div class="attributes block">
- <span class="link showElement">Known Subclasses</span>
- <span class="link hideElement">Known Subclasses</span>
- </div>
- <div class="subClasses hiddenContent">
- <p>{ templatesToHtml(dtpl.subClasses.sortBy(_.name), xml.Text(", ")) }</p>
- </div>
+ <div class="toggleContainer block">
+ <span class="toggle">Known Subclasses</span>
+ <div class="subClasses hiddenContent">{
+ templatesToHtml(dtpl.subClasses.sortBy(_.name), xml.Text(", "))
+ }</div>
</div>
case _ => NodeSeq.Empty
}
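
For illustration only, a stand-alone sketch of the annotation-argument filtering introduced in Template.scala above: annotations whose name is on a hide-list (here "deprecated"/"Deprecated", as in the patch) are rendered without their argument list. The Annot case class and render helper are invented for the example.

object AnnotationDisplaySketch {
  case class Annot(qualifiedName: String, arguments: List[String])

  // mirrors annotationsWithHiddenArguments in the patch
  val hiddenArgs = List("deprecated", "Deprecated")

  def render(a: Annot): String = {
    val args =
      if (a.arguments.isEmpty) ""
      else if (hiddenArgs contains a.qualifiedName) ""   // arguments shown elsewhere
      else a.arguments.mkString("(", ", ", ")")
    "@" + a.qualifiedName + args
  }

  def main(args: Array[String]) {
    println(render(Annot("deprecated", List("\"use bar\"", "\"2.9.0\""))))  // @deprecated
    println(render(Annot("throws", List("classOf[java.io.IOException]")))) // @throws(classOf[java.io.IOException])
  }
}
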
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
new file mode 100644
index 0000000000..2e3f5ea530
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
new file mode 100644
index 0000000000..69038337a7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
new file mode 100644
index 0000000000..36c43be3a2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
index 4127dbf3c2..0e8c893315 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
new file mode 100644
index 0000000000..b9b49076a6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
index 942736e44d..f127e35b48 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
new file mode 100644
index 0000000000..542ba4aa5a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
new file mode 100644
index 0000000000..b5075c16cd
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
new file mode 100644
index 0000000000..d613cf5633
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
new file mode 100644
index 0000000000..ae2f85823b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
new file mode 100644
index 0000000000..a0d93f4844
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
index 7c05678313..e8c7bb961f 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -2,7 +2,7 @@
color: inherit;
font-size: 10pt;
text-decoration: none;
- font-family: sans-serif;
+ font-family: Arial;
border-width: 0px;
padding: 0px;
margin: 0px;
@@ -20,9 +20,13 @@ h1 {
display: none;
}
-.letters {
+/*.letters {
+ font-family: monospace;
+ font-size: 2pt;
padding: 5px;
-}
+ background-color: #DADADA;
+ text-shadow: #ffffff 0 1px 0;
+}*/
#library {
display: none;
@@ -40,16 +44,70 @@ h1 {
#filter {
position: absolute;
display: block;
- padding: 5px;
+/* padding: 5px;*/
right: 0;
left: 0;
top: 0;
- background-color: #DADADA;
+ background-image:url('filterbg.gif');
+ background-repeat:repeat-x;
+ background-color: #ededee; /* light gray */
+ /*background-color: #DADADA;*/
+ border:1px solid #bbbbbb;
+ border-top:0;
+ border-left:0;
+ border-right:0;
}
#textfilter {
position: relative;
display: block;
+ height: 20px;
+ margin-top: 5px;
+ margin-bottom: 5px;
+}
+
+#textfilter > .pre {
+ display: block;
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 23px;
+ width: 21px;
+ background: url("filter_box_left.png");
+}
+
+#textfilter > .input {
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 20px;
+ left: 20px;
+}
+
+#textfilter > .input > input {
+ height: 21px;
+ padding: 1px;
+ font-weight: bold;
+ color: #000000;
+ background-color: white;
+ background: url("filterboxbarbg.png");
+ background-repeat:repeat-x;
+ width: 100%;
+}
+
+#textfilter > .post {
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 0;
+ height: 23px;
+ width: 21px;
+ background: url("filter_box_right.png");
+}
+
+/*#textfilter {
+ position: relative;
+ display: block;
height: 20px;
margin-bottom: 5px;
}
@@ -89,17 +147,20 @@ h1 {
height: 20px;
width: 20px;
background: url("filter_box_right.png");
-}
+}*/
#focusfilter {
position: relative;
+ text-align: center;
display: block;
padding: 5px;
- background-color: pink;
+ background-color: #fffebd; /* light yellow*/
+ text-shadow: #ffffff 0 1px 0;
}
#focusfilter .focuscoll {
font-weight: bold;
+ text-shadow: #ffffff 0 1px 0;
}
#focusfilter img {
@@ -111,14 +172,41 @@ h1 {
position: relative;
display: block;
padding: 5px;
- background-color: #999;
+/* background-color: #999;*/
text-align: center;
}
-
#kindfilter > a {
- color: white;
- text-decoration: underline;
+ color: black;
+/* text-decoration: underline;*/
+ text-shadow: #ffffff 0 1px 0;
+
+}
+
+#kindfilter > a:hover {
+ color: #4C4C4C;
+ text-decoration: none;
+ text-shadow: #ffffff 0 1px 0;
+
+}
+
+#letters {
+ position: relative;
+ text-align: center;
+ padding-bottom: 5px;
+ border:1px solid #bbbbbb;
+ border-top:0;
+ border-left:0;
+ border-right:0;
+}
+
+#letters > a {
+/* font-family: monospace;*/
+ color: #858484;
+ font-weight: bold;
+ font-size: 8pt;
+ text-shadow: #ffffff 0 1px 0;
+ padding-right: 2px;
}
#tpl {
@@ -138,7 +226,6 @@ h1 {
float: right;
font-weight: normal;
color: white;
- padding: 0 8px;
}
#tpl .packfocus {
@@ -149,23 +236,32 @@ h1 {
}
#tpl .packages > ol {
- /*margin-bottom: 5px;*/
+ background-color: #dadfe6;
+ /*margin-bottom: 5px;*/
}
-#tpl .packages > ol > li {
+/*#tpl .packages > ol > li {
margin-bottom: 1px;
+}*/
+
+#tpl .packages > li > a {
+ padding: 0px 5px;
}
-#tpl .packages > li > h3 {
+#tpl .packages > li > a.tplshow {
display: block;
color: white;
font-weight: bold;
display: block;
+ text-shadow: #000000 0 1px 0;
}
#tpl ol > li.pack {
- background-color: #2C475C;
padding: 3px 5px;
+ background: url("packagesbg.gif");
+ background-repeat:repeat-x;
+ min-height: 14px;
+ background-color: #6e808e;
}
#tpl ol > li {
@@ -173,23 +269,24 @@ h1 {
}
#tpl .templates > li {
- padding-left: 5px;
+ padding-left: 5px;
+ min-height: 18px;
}
#tpl ol > li .icon {
- padding-right: 5px;
+ padding-right: 5px;
bottom: -2px;
position: relative;
}
#tpl .templates div.placeholder {
- padding-right: 5px;
- width: 13px;
- display: inline-block;
+ padding-right: 5px;
+ width: 13px;
+ display: inline-block;
}
#tpl .templates span.tplLink {
- padding-left: 5px;
+ padding-left: 5px;
}
#content {
@@ -212,12 +309,18 @@ h1 {
}
.ui-layout-pane {
- background: #FFF;
- overflow: auto;
+ background: #FFF;
+ overflow: auto;
}
.ui-layout-resizer {
- background: #DDD;
+ background-image:url('filterbg.gif');
+ background-repeat:repeat-x;
+ background-color: #ededee; /* light gray */
+ border:1px solid #bbbbbb;
+ border-top:0;
+ border-bottom:0;
+ border-left: 0;
}
.ui-layout-toggler {
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
index b06d5dc369..0f25f024f4 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -9,7 +9,9 @@ var scheduler = undefined;
var kindFilterState = undefined;
var focusFilterState = undefined;
-var title = $(document).attr('title')
+var title = $(document).attr('title');
+
+var lastFragment = "";
$(document).ready(function() {
$('body').layout({ west__size: '20%' });
@@ -17,10 +19,12 @@ $(document).ready(function() {
center__paneSelector: ".ui-west-center"
//,center__initClosed:true
,north__paneSelector: ".ui-west-north"
- });
+ });
$('iframe').bind("load", function(){
var subtitle = $(this).contents().find('title').text();
$(document).attr('title', (title ? title + " - " : "") + subtitle);
+
+ setUrlFragmentFromFrameSrc();
});
// workaround for IE's iframe sizing lack of smartness
@@ -43,8 +47,60 @@ $(document).ready(function() {
configureKindFilter();
configureEntityList();
+ setFrameSrcFromUrlFragment();
+
+ // If the url fragment changes, adjust the src of iframe "template".
+ $(window).bind('hashchange', function() {
+ if(lastFragment != window.location.hash) {
+ lastFragment = window.location.hash;
+ setFrameSrcFromUrlFragment();
+ }
+ });
});
+// Set the iframe's src according to the fragment of the current url.
+// fragment = "#scala.Either" => iframe url = "scala/Either.html"
+// fragment = "#scala.Either@isRight:Boolean" => iframe url = "scala/Either.html#isRight:Boolean"
+function setFrameSrcFromUrlFragment() {
+ var fragment = location.hash.slice(1);
+ if(fragment) {
+ var loc = fragment.split("@")[0].replace(/\./g, "/");
+ if(loc.indexOf(".html") < 0) loc += ".html";
+ if(fragment.indexOf('@') > 0) loc += ("#" + fragment.split("@", 2)[1]);
+ frames["template"].location.replace(loc);
+ }
+ else
+ frames["template"].location.replace("package.html");
+}
+
+// Set the url fragment according to the src of the iframe "template".
+// iframe url = "scala/Either.html" => url fragment = "#scala.Either"
+// iframe url = "scala/Either.html#isRight:Boolean" => url fragment = "#scala.Either@isRight:Boolean"
+function setUrlFragmentFromFrameSrc() {
+ try {
+ var commonLength = location.pathname.lastIndexOf("/");
+ var frameLocation = frames["template"].location;
+ var relativePath = frameLocation.pathname.slice(commonLength + 1);
+
+ if(!relativePath || frameLocation.pathname.indexOf("/") < 0)
+ return;
+
+ // Add #, remove ".html" and replace "/" with "."
+ var fragment = "#" + relativePath.replace(/\.html$/, "").replace(/\//g, ".");
+
+ // Add the frame's hash after an @
+ if(frameLocation.hash) fragment += ("@" + frameLocation.hash.slice(1));
+
+ // Use replace to not add history items
+ lastFragment = fragment;
+ location.replace(fragment);
+ }
+ catch(e) {
+ // Chrome doesn't allow reading the iframe's location when
+ // used on the local file system.
+ }
+}
+
var Index = {};
(function (ns) {
@@ -70,12 +126,11 @@ var Index = {};
return [
'<li class="pack">',
'<a class="packfocus">focus</a><a class="packhide">hide</a>',
- '<h3>',
'<a class="tplshow" target="template" href="',
pack.replace(/\./g, '/'),
'/package.html">',
pack,
- '</a></h3></li>'
+ '</a></li>'
].join('');
};
@@ -139,13 +194,13 @@ var Index = {};
function subPackages(pack) {
return $.grep($('#tpl ol.packages'), function (element, index) {
- var pack = $('h3', element).text();
+ var pack = $('li.pack > .tplshow', element).text();
return pack.indexOf(pack + '.') == 0;
});
}
ns.hidePackage = function (ol) {
- var selected = $('h3', ol).text();
+ var selected = $('li.pack > .tplshow', ol).text();
hiddenPackages[selected] = true;
$('ol.templates', ol).hide();
@@ -156,7 +211,7 @@ var Index = {};
}
ns.showPackage = function (ol, state) {
- var selected = $('h3', ol).text();
+ var selected = $('li.pack > .tplshow', ol).text();
hiddenPackages[selected] = false;
$('ol.templates', ol).show();
@@ -166,7 +221,7 @@ var Index = {};
// When the filter is in "packs" state,
// we don't want to show the `.templates`
- var key = $('h3', element).text();
+ var key = $('li.pack > .tplshow', element).text();
if (hiddenPackages[key] || state == 'packs') {
$('ol.templates', element).hide();
}
@@ -209,6 +264,7 @@ function prepareEntityList() {
function configureTextFilter() {
scheduler.add("init", function() {
$("#filter").append("<div id='textfilter'><span class='pre'/><span class='input'><input type='text' accesskey='/'/></span><span class='post'/></div>");
+ printAlphabet();
var input = $("#textfilter input");
resizeFilterBlock();
input.bind("keyup", function(event) {
@@ -343,7 +399,7 @@ function configureFocusFilter() {
function focusFilter(package) {
scheduler.clear("filter");
- var currentFocus = $('h3', package).text();
+ var currentFocus = $('li.pack > .tplshow', package).text();
$("#focusfilter > .focuscoll").empty();
$("#focusfilter > .focuscoll").append(currentFocus);
@@ -391,3 +447,8 @@ function kindFilterSync() {
function resizeFilterBlock() {
$("#tpl").css("top", $("#filter").outerHeight(true));
}
+
+function printAlphabet() {
+ $("#filter").append("<div id='letters'><a target='template' href='index/index-%23.html'>#</a><a target='template' href='index/index-a.html'>A</a><a target='template' href='index/index-b.html'>B</a><a target='template' href='index/index-c.html'>C</a><a target='template' href='index/index-d.html'>D</a><a target='template' href='index/index-e.html'>E</a><a target='template' href='index/index-f.html'>F</a><a target='template' href='index/index-g.html'>G</a><a target='template' href='index/index-h.html'>H</a><a target='template' href='index/index-i.html'>I</a><a target='template' href='index/index-j.html'>J</a><a target='template' href='index/index-k.html'>K</a><a target='template' href='index/index-l.html'>L</a><a target='template' href='index/index-m.html'>M</a><a target='template' href='index/index-n.html'>N</a><a target='template' href='index/index-o.html'>O</a><a target='template' href='index/index-p.html'>P</a><a target='template' href='index/index-q.html'>Q</a><a target='template' href='index/index-r.html'>R</a><a target='template' href='index/index-s.html'>S</a><a target='template' href='index/index-t.html'>T</a><a target='template' href='index/index-u.html'>U</a><a target='template' href='index/index-v.html'>V</a><a target='template' href='index/index-w.html'>W</a><a target='template' href='index/index-x.html'>X</a><a target='template' href='index/index-y.html'>Y</a><a target='template' href='index/index-z.html'>Z</a></div>");
+}
+
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
new file mode 100644
index 0000000000..9b32288e04
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
new file mode 100644
index 0000000000..fd0ad06e81
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
new file mode 100644
index 0000000000..848dd5963a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
new file mode 100644
index 0000000000..34a04249ee
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
new file mode 100644
index 0000000000..2ed33b0aa4
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
new file mode 100644
index 0000000000..00c3378a2a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png
new file mode 100644
index 0000000000..04eda2f307
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png
new file mode 100644
index 0000000000..c89765239e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
new file mode 100644
index 0000000000..bf984ef0ba
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png
new file mode 100644
index 0000000000..a790bb1169
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
new file mode 100644
index 0000000000..b6ac4415e4
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
new file mode 100644
index 0000000000..9aae5ba0aa
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 192a56f9bb..0b2bc7f7b1 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -30,7 +30,7 @@ input { border-width: 0px; }
/* Page */
body {
- font-family: sans-serif;
+ font-family: Arial, sans-serif;
font-size: 10pt;
}
@@ -45,12 +45,23 @@ a[href]:hover {
#owner {
padding: 2px 10px 5px;
+ height: 17px;
font-size: 12pt;
display: block;
}
#owner a {
color: black;
+/* text-decoration: none;*/
+/* font-weight: bold;*/
+/* text-shadow: white 0px 1px 0px;*/
+}
+
+#owner a:hover {
+ color: black;
+ text-decoration: none;
+/* font-weight: bold;*/
+/* text-shadow: white 0px 1px 0px;*/
}
.value #owner {
@@ -58,16 +69,23 @@ a[href]:hover {
}
.type #owner {
- background-color: #699D84; /* lighter green */
+ background-color: #95b8a1; /* lighter green */
+/* background-image:url('ownerbg2.gif');
+ background-repeat:repeat-x;*/
}
#types ol li > p {
margin-top: 5px;
}
+#types ol li:last-child {
+ margin-bottom: 5px;
+}
+
#definition {
display: block;
padding: 5px 6px;
+ height: 61px;
}
#definition > img {
@@ -77,19 +95,36 @@ a[href]:hover {
#definition > h1 {
padding: 12px 0 12px 6px;
color: white;
+/* text-shadow: 3px black;
+ text-shadow: black 0px 2px 0px;
+ */
font-size: 24pt;
display: inline-block;
}
#definition h1 > a {
- color: white;
+ color: #ffffff;
+/* text-decoration: none; */
+/* font-size: 24pt;
+ text-shadow: black 0px 0px 0px;
+ */
+ }
+
+#definition h1 > a:hover {
+ color: #ffffff;
+ text-decoration: none;
font-size: 24pt;
-}
+/* text-shadow: black 0px 2px 0px;
+ */
+ }
#signature {
- background-color: #EBEBEB;
+ background-image:url('signaturebg2.gif');
+ background-color: #d7d7d7;
+ min-height: 18px;
+ background-repeat:repeat-x;
font-size: 11.5pt;
- margin-bottom: 10px;
+/* margin-bottom: 10px;*/
padding: 8px;
}
@@ -99,51 +134,52 @@ a[href]:hover {
text-align: left;
width: auto;
position: static;
+/* text-shadow: 2px white;
+ text-shadow: white 0px 1px 0px;
+ */
}
#signature > span.symbol {
text-align: left;
display: inline;
padding-left: 0.7em;
+/* text-shadow: 2px white;
+ text-shadow: white 0px 1px 0px; */
}
/* Linear super types and known subclasses */
-.hideElement,
.hiddenContent {
display: none;
}
-div.superTypes > h1 {
- font-style: italic;
- padding-top: 0.5em;
-}
-
-.link {
- text-decoration: underline;
+.toggleContainer .toggle {
cursor: pointer;
- color: #315479;
+ padding-left: 15px;
+ background: url("arrow-right.png") no-repeat 0 3px transparent;
+}
+
+.toggleContainer.open .toggle {
+ background: url("arrow-down.png") no-repeat 0 3px transparent;
}
-.toggleContainer p {
+.toggleContainer .hiddenContent {
margin-top: 5px;
}
.toggleContainer .showElement {
padding-left: 15px;
- background: url("arrow-right.png") no-repeat 0 5px transparent;
-}
-
-.toggleContainer .hideElement {
- padding-left: 15px;
- background: url("arrow-down.png") no-repeat 0 5px transparent;
}
.value #definition {
background-color: #2C475C; /* blue */
+ background-image:url('defbg-blue.gif');
+ background-repeat:repeat-x;
}
.type #definition {
background-color: #316555; /* green */
+ background-image:url('defbg-green.gif');
+ background-repeat:repeat-x;
}
#template {
@@ -155,6 +191,7 @@ h3 {
padding: 5px 10px;
font-size: 12pt;
font-weight: bold;
+ text-shadow: black 1px 1px 0px;
}
dl.attributes > dt {
@@ -171,19 +208,41 @@ dl.attributes > dd {
#template .values > h3 {
background-color: #315479; /* grayish blue */
+ background: url("valuemembersbg.gif");
+ background-repeat:repeat-x;
+ height: 18px;
+}
+
+#values ol li:last-child {
+ margin-bottom: 5px;
}
#template .types > h3 {
background-color: #316555; /* green */
+ background: url("typebg.gif");
+ background-repeat:repeat-x;
+ height: 18px;
}
#constructors > h3 {
background-color: #333;
+ background: url("constructorsbg.gif");
+ background-repeat:repeat-x;
+ height: 18px;
}
#template > div.parent > h3 {
- background-color: #3B3B3B;
- margin-top: 10px;
+ background-color: #dadada;
+/* border:1px solid #6f6f6f;
+ border-left:0;
+ border-right:0;*/
+ background: url("constructorsbg.gif");
+ background-repeat:repeat-x;
+ height: 17px;
+/* margin-top: 10px;*/
+/* color: black;
+ text-shadow: white 0px 1px 0px;
+ font-weight: normal;8*/
font-style: italic;
font-size: 12pt;
}
@@ -229,6 +288,8 @@ div.members > ol > li:last-child {
line-height: 18px;
clear: both;
display: block;
+/* text-shadow: 2px white;
+ text-shadow: white 0px 1px 0px; */
}
.signature .kind {
@@ -405,10 +466,17 @@ p.comment {
margin: 5px 10px;
}
+div.fullcommenttop {
+ padding: 10px 10px;
+ background-image:url('fullcommenttopbg.gif');
+ background-repeat:repeat-x;
+}
+
div.fullcomment {
margin: 5px 10px;
}
+#template div.fullcommenttop,
#template div.fullcomment {
display:none;
margin: 5px 0 0 8.7em;
@@ -425,6 +493,13 @@ div.fullcomment .block {
margin-top: 5px;
}
+div.fullcommenttop .block {
+ padding: 5px 0 0;
+ border-top: 1px solid #EBEBEB;
+ margin-top: 5px;
+ margin-bottom: 5px;
+}
+
div.fullcomment div.block ol li p,
div.fullcomment div.block ol li {
display:inline
@@ -440,16 +515,20 @@ div.fullcomment .comment {
margin: 5px 0 10px;
}
+div.fullcommenttop .comment:last-child,
div.fullcomment .comment:last-child {
margin-bottom: 0;
}
+
+div.fullcommenttop dl.paramcmts > dt,
div.fullcomment dl.paramcmts > dt {
display: block;
float: left;
font-weight: bold;
}
+div.fullcommenttop dl.paramcmts > dd,
div.fullcomment dl.paramcmts > dd {
display: block;
padding-left: 80px;
@@ -470,8 +549,8 @@ div.fullcomment dl.paramcmts > dd {
position: absolute;
top: 0;
left: 0;
- height: 20px;
- width: 20px;
+ height: 23px;
+ width: 21px;
background: url("filter_box_left.png");
}
@@ -484,12 +563,14 @@ div.fullcomment dl.paramcmts > dd {
}
#textfilter > .input > input {
- height: 16px;
- padding: 2px;
- font-weight: bold;
- color: #993300;
+ height: 21px;
+ padding: 1px;
+ font-weight: bold;
+ color: #000000;
background-color: white;
- width: 100%;
+ background: url("filterboxbarbg.png");
+ background-repeat:repeat-x;
+ width: 100%;
}
#textfilter > .post {
@@ -497,14 +578,16 @@ div.fullcomment dl.paramcmts > dd {
position: absolute;
top: 0;
right: 0;
- height: 20px;
- width: 20px;
+ height: 23px;
+ width: 21px;
background: url("filter_box_right.png");
}
#mbrsel {
padding: 5px 10px;
- background-color: #DADADA; /* light gray */
+ background-color: #ededee; /* light gray */
+ background-image:url('filterboxbg.gif');
+ background-repeat:repeat-x;
font-size: 9.5pt;
display: block;
margin-top: 1em;
@@ -520,47 +603,103 @@ div.fullcomment dl.paramcmts > dd {
}
#mbrsel > div > span.filtertype {
- padding: 3px 0;
+ padding: 4px;
+ margin-right: 5px;
float: left;
display: inline-block;
- color: #404040;
+ color: #000000;
+ font-weight: bold;
+ text-shadow: white 0px 1px 0px;
width: 4.5em;
}
#mbrsel > div > ol {
display: inline-block;
- /*background-color: white;*/
}
#mbrsel > div > ol#linearization {
- display: block;
- margin-left: 4.5em;
- margin-top: 5px;
+ display: table;
+ margin-left: 70px;
+}
+
+#mbrsel > div > ol#linearization > li.in {
+ text-decoration: none;
+ float: left;
+ padding-right: 10px;
+ margin-right: 5px;
+ background: url(selected-right.png) no-repeat;
+ background-position: right 0px;
+}
+
+#mbrsel > div > ol#linearization > li.in > span{
+ color: #4C4C4C;
+ float: left;
+ padding: 1px 0 1px 10px;
+ background: url(selected.png) no-repeat;
+ background-position: 0px 0px;
+ text-shadow: #ffffff 0 1px 0;
}
#mbrsel > div > ol > li {
- padding: 3px 10px;
- background-color: white;
+/* padding: 3px 10px;*/
+ line-height: 16pt;
display: inline-block;
cursor: pointer;
}
#mbrsel > div > ol > li.in {
- background-color: white;
+ text-decoration: none;
+ float: left;
+ padding-right: 10px;
+ margin-right: 5px;
+ background: url(selected-right.png) no-repeat;
+ background-position: right 0px;
+}
+
+#mbrsel > div > ol > li.in > span{
color: #4C4C4C;
+ float: left;
+ padding: 1px 0 1px 10px;
+ background: url(selected.png) no-repeat;
+ background-position: 0px 0px;
+ text-shadow: #ffffff 0 1px 0;
}
#mbrsel > div > ol > li.out {
- color: #4B4B4B;
- background-color: #999;
+ text-decoration: none;
+ float: left;
+ padding-right: 10px;
+ margin-right: 5px;
}
+#mbrsel > div > ol > li.out > span{
+ color: #4B4B4B;
+/* background-color: #999; */
+ float: left;
+ padding: 1px 0 1px 10px;
+/* background: url(unselected.png) no-repeat;*/
+ background-position: 0px -1px;
+ text-shadow: #ffffff 0 1px 0;
+}
+/*
#mbrsel .hideall {
color: #4C4C4C;
+ line-height: 16px;
+ font-weight: bold;
+}
+
+#mbrsel .hideall span {
+ color: #4C4C4C;
font-weight: bold;
}
#mbrsel .showall {
color: #4C4C4C;
+ line-height: 16px;
font-weight: bold;
}
+
+#mbrsel .showall span {
+ color: #4C4C4C;
+ font-weight: bold;
+}*/
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index 14a5613015..2479945299 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -263,5 +263,11 @@ function filter() {
function windowTitle()
{
- parent.document.title=document.title;
+ try {
+ parent.document.title=document.title;
+ }
+ catch(e) {
+ // Chrome doesn't allow setting the parent's title when
+ // used on the local file system.
+ }
};
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif
new file mode 100644
index 0000000000..2fcc77b2e8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png
new file mode 100644
index 0000000000..d5ac639405
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
new file mode 100644
index 0000000000..2a949311d7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 6c599e1a88..3d3d3d7876 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -18,7 +18,7 @@ object IndexModelFactory {
val result = new mutable.HashMap[Char,SymbolMap] {
/* Owner template ordering */
- implicit def orderingSet = math.Ordering.String.on { x: TemplateEntity => x.name.toLowerCase }
+ implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
/* symbol name ordering */
implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase }
@@ -27,14 +27,13 @@ object IndexModelFactory {
val ch = d.name.head.toLower
if(ch.isLetterOrDigit) ch else '#'
}
- this(firstLetter) =
- if(this.contains(firstLetter)) {
- val letter = this(firstLetter)
- val value = this(firstLetter).get(d.name).getOrElse(SortedSet.empty[TemplateEntity]) + d.inDefinitionTemplates.head
- letter + ((d.name, value))
- } else {
- immutable.SortedMap( (d.name, SortedSet(d.inDefinitionTemplates.head)) )
+ val letter = this.get(firstLetter).getOrElse {
+ immutable.SortedMap[String, SortedSet[MemberEntity]]()
}
+ val members = letter.get(d.name).getOrElse {
+ SortedSet.empty[MemberEntity](Ordering.by { _.toString })
+ } + d
+ this(firstLetter) = letter + (d.name -> members)
}
}
@@ -63,4 +62,4 @@ object IndexModelFactory {
}
-}
\ No newline at end of file
+}
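
As a stand-alone illustration of the data structure the rewritten IndexModelFactory code above builds: members are bucketed by first letter ('#' for non-alphanumeric names) into case-insensitively sorted maps from member name to the set of owners defining it. Entity types are simplified to String and all names are hypothetical.

import scala.collection.immutable.{ SortedMap, SortedSet }
import scala.collection.mutable

object LetterIndexSketch {
  // case-insensitive ordering, in the spirit of orderingSet/orderingMap above
  implicit val caseInsensitive = math.Ordering.String.on { s: String => s.toLowerCase }

  def main(args: Array[String]) {
    val index = mutable.HashMap[Char, SortedMap[String, SortedSet[String]]]()

    def add(name: String, owner: String) {
      val c = name.head.toLower
      val firstLetter = if (c.isLetterOrDigit) c else '#'
      val letter  = index.getOrElse(firstLetter, SortedMap[String, SortedSet[String]]())
      val members = letter.getOrElse(name, SortedSet[String]()) + owner
      index(firstLetter) = letter + (name -> members)
    }

    add("map", "scala.collection.immutable.List")
    add("map", "scala.Option")
    add("++",  "scala.collection.immutable.List")
    println(index)  // e.g. Map(m -> Map(map -> TreeSet(...)), # -> Map(++ -> TreeSet(...)))
  }
}
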
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index 8ec93d70e3..64178b5eac 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -253,7 +253,7 @@ trait CompilerControl { self: Global =>
// items that get sent to scheduler
abstract class WorkItem extends (() => Unit) {
- def onCompilerThread = self.onCompilerThread
+ val onCompilerThread = self.onCompilerThread
}
case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 2563feeb72..9b50b82bb3 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -537,11 +537,12 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
} catch {
case CancelException =>
debugLog("cancelled")
-/* Commented out. Typing should always cancel requests
case ex @ FreshRunReq =>
- scheduler.postWorkItem(() => respondGradually(response)(op))
+ if (debugIDE) {
+ println("FreshRunReq thrown during response")
+ }
+ response raise ex
throw ex
-*/
case ex =>
if (debugIDE) {
println("exception thrown during response: "+ex)
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index b0776cc251..bfc1c2dbc2 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -111,7 +111,9 @@ object Plugin {
*/
def loadFrom(jarfile: Path, loader: ClassLoader): Option[AnyClass] =
loadDescription(jarfile) match {
- case None => None
+ case None =>
+ println("Warning: could not load descriptor for plugin %s".format(jarfile))
+ None
case Some(pdesc) =>
try Some(loader loadClass pdesc.classname) catch {
case _: Exception =>
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index d17077b129..bbdbb09a9d 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -408,7 +408,8 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr)
def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass) || hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated
def isDeprecated = hasAnnotation(DeprecatedAttr)
- def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap { _.stringArg(0) }
+ def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
+ def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1)
// !!! when annotation arguments are not literal strings, but any sort of
// assembly of strings, there is a fair chance they will turn up here not as
// Literal(const) but some arbitrary AST. However nothing in the compiler
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 96ff17e897..a1ff7d99ef 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -895,7 +895,7 @@ abstract class ClassfileParser {
in.skip(attrLen)
case tpnme.DeprecatedATTR =>
val arg = Literal(Constant("see corresponding Javadoc for more information."))
- sym.addAnnotation(AnnotationInfo(definitions.DeprecatedAttr.tpe, List(arg), List()))
+ sym addAnnotation AnnotationInfo(definitions.DeprecatedAttr.tpe, List(arg, Literal(Constant(""))), Nil)
in.skip(attrLen)
case tpnme.ConstantValueATTR =>
val c = pool.getConstant(in.nextChar)
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 9980db6ff5..ce76117cfd 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -543,7 +543,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
typedWithPos(tree.pos) {
if (isValueClass(tpe.typeSymbol)) {
if (tpe.typeSymbol == UnitClass)
- Select(REF(BoxedUnit_TYPE), BoxedUnit_TYPE)
+ REF(BoxedUnit_TYPE)
else
Select(REF(boxedModule(tpe.typeSymbol)), nme.TYPE_)
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index f9f2f94917..36552894e5 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -13,6 +13,7 @@ import Flags._
abstract class Erasure extends AddInterfaces
with typechecker.Analyzer
+ with TypingTransformers
with ast.TreeDSL
{
import global._
@@ -922,9 +923,12 @@ abstract class Erasure extends AddInterfaces
* - Add bridge definitions to a template.
* - Replace all types in type nodes and the EmptyTree object by their erasure.
* Type nodes of type Unit representing result types of methods are left alone.
+ * - Given a selection q.s, where the owner of `s` is not accessible but the
+ * type symbol of q's type qT is accessible, insert a cast (q.asInstanceOf[qT]).s
+ * This prevents illegal access errors (see #4283).
* - Reset all other type attributes to null, thus enforcing a retyping.
*/
- private val preTransformer = new Transformer {
+ private val preTransformer = new TypingTransformer(unit) {
def preErase(tree: Tree): Tree = tree match {
case ClassDef(mods, name, tparams, impl) =>
if (settings.debug.value)
@@ -1042,14 +1046,25 @@ abstract class Erasure extends AddInterfaces
}
}
- case Select(_, _) =>
+ case Select(qual, name) =>
+ val owner = tree.symbol.owner
// println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
- if (tree.symbol.owner.isRefinementClass) {
+ if (owner.isRefinementClass) {
val overridden = tree.symbol.allOverriddenSymbols
assert(!overridden.isEmpty, tree.symbol)
tree.symbol = overridden.head
}
- tree
+ def isAccessible(sym: Symbol) = localTyper.context.isAccessible(sym, sym.owner.thisType)
+ if (!isAccessible(owner) && qual.tpe != null) {
+ // Todo: Figure out how qual.tpe could be null in the check above (it does appear in build where SwingWorker.this
+ // has a null type).
+ val qualSym = qual.tpe.widen.typeSymbol
+ if (isAccessible(qualSym) && !qualSym.isPackageClass && !qualSym.isPackageObjectClass) {
+ // insert cast to prevent illegal access error (see #4283)
+ // util.trace("insert erasure cast ") (*/
+ treeCopy.Select(tree, qual AS_ATTR qual.tpe.widen, name) //)
+ } else tree
+ } else tree
case Template(parents, self, body) =>
assert(!currentOwner.isImplClass)
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 2ab7daa901..9708372ee4 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -72,11 +72,21 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
tree match {
case DefDef(mods, name, tparams, vparams, tpt, rhs) => atOwner(tree.symbol) {
val res = if (!sym.owner.isClass && sym.isLazy) {
- val enclosingDummyOrMethod =
- if (sym.enclMethod == NoSymbol) sym.owner else sym.enclMethod
- val idx = lazyVals(enclosingDummyOrMethod)
- lazyVals(enclosingDummyOrMethod) = idx + 1
- val rhs1 = mkLazyDef(enclosingDummyOrMethod, super.transform(rhs), idx, sym)
+ val enclosingClassOrDummyOrMethod = {
+ val enclMethod = sym.enclMethod
+
+ if (enclMethod != NoSymbol ) {
+ val enclClass = sym.enclClass
+ if (enclClass != NoSymbol && enclMethod == enclClass.enclMethod)
+ enclClass
+ else
+ enclMethod
+ } else
+ sym.owner
+ }
+ val idx = lazyVals(enclosingClassOrDummyOrMethod)
+ lazyVals(enclosingClassOrDummyOrMethod) = idx + 1
+ val rhs1 = mkLazyDef(enclosingClassOrDummyOrMethod, super.transform(rhs), idx, sym)
sym.resetFlag((if (lazyUnit(sym)) 0 else LAZY) | ACCESSOR)
rhs1
} else
@@ -103,7 +113,18 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
case _ =>
stat
}
- treeCopy.Template(tree, parents, self, stats)
+ val innerClassBitmaps = if (!added && currentOwner.isClass && bitmaps.contains(currentOwner)) {
+ // add bitmap to inner class if necessary
+ val toAdd0 = bitmaps(currentOwner).map(s => typed(ValDef(s, ZERO)))
+ toAdd0.foreach(t => {
+ if (currentOwner.info.decl(t.symbol.name) == NoSymbol) {
+ t.symbol.setFlag(PROTECTED)
+ currentOwner.info.decls.enter(t.symbol)
+ }
+ })
+ toAdd0
+ } else List()
+ treeCopy.Template(tree, parents, self, innerClassBitmaps ++ stats)
}
case ValDef(mods, name, tpt, rhs0) if (!sym.owner.isModule && !sym.owner.isClass) =>
@@ -186,10 +207,13 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* ()
* }
*/
- private def mkLazyDef(meth: Symbol, tree: Tree, offset: Int, lazyVal: Symbol): Tree = {
- val bitmapSym = getBitmapFor(meth, offset)
+ private def mkLazyDef(methOrClass: Symbol, tree: Tree, offset: Int, lazyVal: Symbol): Tree = {
+ val bitmapSym = getBitmapFor(methOrClass, offset)
val mask = LIT(1 << (offset % FLAGS_PER_WORD))
- def mkBlock(stmt: Tree) = BLOCK(stmt, mkSetFlag(bitmapSym, mask), UNIT)
+ val bitmapRef = if (methOrClass.isClass) Select(This(methOrClass), bitmapSym) else Ident(bitmapSym)
+
+ def mkBlock(stmt: Tree) = BLOCK(stmt, mkSetFlag(bitmapSym, mask, bitmapRef), UNIT)
+
val (block, res) = tree match {
case Block(List(assignment), res) if !lazyUnit(lazyVal) =>
@@ -198,16 +222,16 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
(mkBlock(rhs), UNIT)
}
- val cond = (Ident(bitmapSym) INT_& mask) INT_== ZERO
+ val cond = (bitmapRef INT_& mask) INT_== ZERO
atPos(tree.pos)(localTyper.typed {
- def body = gen.mkDoubleCheckedLocking(meth.enclClass, cond, List(block), Nil)
+ def body = gen.mkDoubleCheckedLocking(methOrClass.enclClass, cond, List(block), Nil)
BLOCK(body, res)
})
}
- private def mkSetFlag(bmp: Symbol, mask: Tree): Tree =
- Ident(bmp) === (Ident(bmp) INT_| mask)
+ private def mkSetFlag(bmp: Symbol, mask: Tree, bmpRef: Tree): Tree =
+ bmpRef === (bmpRef INT_| mask)
val bitmaps = new mutable.HashMap[Symbol, List[Symbol]] {
override def default(meth: Symbol) = Nil
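
For orientation, a hand-written approximation of the code shape the LazyVals transform above produces for a lazy val defined in a method: a bitmap word whose bit records initialization, checked twice around a synchronized block. This is a sketch of the general pattern, not the compiler's literal output; all names are invented.

object LocalLazySketch {
  def compute(): Int = {
    var bitmap0 = 0                     // one bit per lazy val in this scope
    var x = 0                           // storage for the lazy val's value

    def forceX(): Int = {
      if ((bitmap0 & 1) == 0) {         // fast path: not yet initialized?
        this.synchronized {
          if ((bitmap0 & 1) == 0) {     // re-check under the lock
            x = 41 + 1                  // the lazy val's right-hand side
            bitmap0 = bitmap0 | 1       // mark it initialized
          }
        }
      }
      x
    }

    forceX()
  }

  def main(args: Array[String]) {
    println(compute())  // prints 42
  }
}
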
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 49f5907245..d98281aaa4 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -675,7 +675,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def bitmapOperation[T](field: Symbol, transientCase: => T, privateCase: => T, rest: => T): T =
if (field.accessed.hasAnnotation(TransientAttr))
transientCase
- else if (field.hasFlag(PRIVATE))
+ else if (field.hasFlag(PRIVATE) || field.hasFlag(notPRIVATE))
privateCase
else
rest
@@ -686,7 +686,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* Similarly fields in the checkinit mode use private bitmaps.
*/
def localBitmapField(field: Symbol) =
- field.accessed.hasAnnotation(TransientAttr) || field.hasFlag(PRIVATE) || checkinitField(field)
+ field.accessed.hasAnnotation(TransientAttr) || field.hasFlag(PRIVATE | notPRIVATE) || checkinitField(field)
/**
* Return the bitmap field for 'offset'. Depending on the hierarchy it is possible to reuse
@@ -705,7 +705,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bitmapName = if (checkinitField) bitmapCheckinitName else bitmapLazyName
def createBitmap: Symbol = {
-
val sym = clazz0.newVariable(clazz0.pos, bitmapName).setInfo(IntClass.tpe)
atPhase(currentRun.typerPhase) {
sym addAnnotation AnnotationInfo(VolatileAttr.tpe, Nil, Nil)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index eb9eb9dbec..7567cf6483 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -383,7 +383,7 @@ trait Contexts { self: Analyzer =>
* @param superAccess ...
* @return ...
*/
- def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean): Boolean = {
+ def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean = false): Boolean = {
lastAccessCheckDetails = ""
// Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
@@ -507,7 +507,7 @@ trait Contexts { self: Analyzer =>
*/
private def isQualifyingImplicit(sym: Symbol, pre: Type, imported: Boolean) =
sym.isImplicit &&
- isAccessible(sym, pre, false) &&
+ isAccessible(sym, pre) &&
!(imported && {
val e = scope.lookupEntry(sym.name)
(e ne null) && (e.owner == scope)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 3fc8344d8d..a05aefe1fa 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -13,7 +13,7 @@ package typechecker
import annotation.tailrec
import scala.collection.{ mutable, immutable }
-import mutable.{ LinkedHashMap, ListBuffer }
+import mutable.{ HashMap, LinkedHashMap, ListBuffer }
import scala.util.matching.Regex
import symtab.Flags._
import util.Statistics._
@@ -65,12 +65,20 @@ trait Implicits {
result
}
- final val sizeLimit = 50000
+ private final val sizeLimit = 50000
private type Infos = List[ImplicitInfo]
private type Infoss = List[List[ImplicitInfo]]
- val implicitsCache = new LinkedHashMap[Type, Infoss]
+ private type InfoMap = LinkedHashMap[Symbol, List[ImplicitInfo]]
+ private val implicitsCache = new LinkedHashMap[Type, Infoss]
+ private val infoMapCache = new LinkedHashMap[Symbol, InfoMap]
+ private val improvesCache = new HashMap[(ImplicitInfo, ImplicitInfo), Boolean]
+
+ def resetImplicits() {
+ implicitsCache.clear()
+ infoMapCache.clear()
+ improvesCache.clear()
+ }
- def resetImplicits() { implicitsCache.clear() }
private val ManifestSymbols = Set(PartialManifestClass, FullManifestClass, OptManifestClass)
/** The result of an implicit search
@@ -223,8 +231,17 @@ trait Implicits {
def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
incCounter(improvesCount)
(info2 == NoImplicitInfo) ||
- (info1 != NoImplicitInfo) &&
- isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
+ (info1 != NoImplicitInfo) && {
+ if (info1.sym.isStatic && info2.sym.isStatic) {
+ improvesCache get (info1, info2) match {
+ case Some(b) => incCounter(improvesCachedCount); b
+ case None =>
+ val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
+ improvesCache((info1, info2)) = result
+ result
+ }
+ } else isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
+ }
}
/** Map all type params in given list to WildcardType
@@ -745,6 +762,106 @@ trait Implicits {
* can be accessed with unambiguous stable prefixes, the implicits infos
* which are members of these companion objects.
*/
+ private def companionImplicitMap(tp: Type): InfoMap = {
+ val infoMap = new InfoMap
+ val seen = mutable.HashSet[Type]() // cycle detection
+
+ def getClassParts(tp: Type, infoMap: InfoMap, seen: mutable.Set[Type]) = tp match {
+ case TypeRef(pre, sym, args) =>
+ infoMap get sym match {
+ case Some(infos1) =>
+ if (infos1.nonEmpty && !(pre =:= infos1.head.pre.prefix)) {
+ println("amb prefix: "+pre+"#"+sym+" "+infos1.head.pre.prefix+"#"+sym)
+ infoMap(sym) = List() // ambiguous prefix - ignore implicit members
+ }
+ case None =>
+ if (pre.isStable) {
+ val companion = sym.companionModule
+ companion.moduleClass match {
+ case mc: ModuleClassSymbol =>
+ val infos =
+ for (im <- mc.implicitMembers) yield new ImplicitInfo(im.name, singleType(pre, companion), im)
+ if (infos.nonEmpty)
+ infoMap += (sym -> infos)
+ case _ =>
+ }
+ }
+ val bts = tp.baseTypeSeq
+ var i = 1
+ while (i < bts.length) {
+ getParts(bts(i), infoMap, seen)
+ i += 1
+ }
+ getParts(pre, infoMap, seen)
+ }
+ }
+
+ /** Enter all parts of `tp` into `parts` set.
+ * This method is performance critical: about 2-4% of all type checking is spent here
+ */
+ def getParts(tp: Type, infoMap: InfoMap, seen: mutable.Set[Type]) {
+ if (seen(tp))
+ return
+ seen += tp
+ tp match {
+ case TypeRef(pre, sym, args) =>
+ if (sym.isClass) {
+ if (!((sym.name == tpnme.REFINE_CLASS_NAME) ||
+ (sym.name startsWith tpnme.ANON_CLASS_NAME) ||
+ (sym.name == tpnme.ROOT))) {
+ if (sym.isStatic)
+ infoMap ++= {
+ infoMapCache get sym match {
+ case Some(imap) => imap
+ case None =>
+ infoMapCache(sym) = LinkedHashMap.empty // to break cycles
+ val result = new InfoMap
+ getClassParts(sym.tpe, result, new mutable.HashSet[Type]())
+ infoMapCache(sym) = result
+ result
+ }
+ }
+ else
+ getClassParts(tp, infoMap, seen)
+ args foreach (getParts(_, infoMap, seen))
+ }
+ } else if (sym.isAliasType) {
+ getParts(tp.normalize, infoMap, seen)
+ } else if (sym.isAbstractType) {
+ getParts(tp.bounds.hi, infoMap, seen)
+ }
+ case ThisType(_) =>
+ getParts(tp.widen, infoMap, seen)
+ case _: SingletonType =>
+ getParts(tp.widen, infoMap, seen)
+ case RefinedType(ps, _) =>
+ for (p <- ps) getParts(p, infoMap, seen)
+ case AnnotatedType(_, t, _) =>
+ getParts(t, infoMap, seen)
+ case ExistentialType(_, t) =>
+ getParts(t, infoMap, seen)
+ case PolyType(_, t) =>
+ getParts(t, infoMap, seen)
+ case _ =>
+ }
+ }
+
+ getParts(tp, infoMap, seen)
+ if (settings.verbose.value)
+ println("companion implicits of "+tp+" = "+infoMap) // DEBUG
+ infoMap
+ }
+
+ /** The parts of a type form the smallest set of types that contains
+ * - the type itself
+ * - the parts of its immediate components (prefix and argument)
+ * - the parts of its base types
+ * - for alias types and abstract types, we take instead the parts
+ * of their upper bounds.
+ * @return For those parts that refer to classes with companion objects that
+ * can be accessed with unambiguous stable prefixes, the implicits infos
+ * which are members of these companion objects.
+
private def companionImplicits(tp: Type): Infoss = {
val partMap = new LinkedHashMap[Symbol, Type]
val seen = mutable.HashSet[Type]() // cycle detection
@@ -815,6 +932,8 @@ trait Implicits {
buf.toList
}
+*/
+
/** The implicits made available by type `pt`.
* These are all implicits found in companion objects of classes C
* such that some part of `tp` has C as one of its superclasses.
@@ -826,12 +945,19 @@ trait Implicits {
case None =>
incCounter(implicitCacheMisses)
val start = startTimer(subtypeETNanos)
- val implicitInfoss = companionImplicits(pt)
+// val implicitInfoss = companionImplicits(pt)
+ val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
+// val is1 = implicitInfoss.flatten.toSet
+// val is2 = implicitInfoss1.flatten.toSet
+// for (i <- is1)
+// if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
+// for (i <- is2)
+// if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
stopTimer(subtypeETNanos, start)
- implicitsCache(pt) = implicitInfoss
+ implicitsCache(pt) = implicitInfoss1
if (implicitsCache.size >= sizeLimit)
implicitsCache -= implicitsCache.keysIterator.next
- implicitInfoss
+ implicitInfoss1
}
/** Creates a tree that calls the relevant factory method in object
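
A minimal, runnable illustration of the "companion implicits" that companionImplicitMap above collects: an implicit conversion declared in the companion object of a class occurring in the expected type is found with no import, because the companion belongs to that type's implicit scope. Meters and fromDouble are made-up names.

object CompanionImplicitsSketch {
  class Meters(val value: Double) {
    override def toString = value + " m"
  }
  object Meters {
    // lives in the companion, hence in the implicit scope of type Meters
    implicit def fromDouble(d: Double): Meters = new Meters(d)
  }

  def describe(m: Meters): String = "distance: " + m

  def main(args: Array[String]) {
    println(describe(2.5))  // Double => Meters is supplied by object Meters
  }
}
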
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 296e555559..d198303d66 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -234,14 +234,6 @@ trait Infer {
def explainTypes(tp1: Type, tp2: Type) =
withDisambiguation(tp1, tp2)(global.explainTypes(tp1, tp2))
- def accessError(tree: Tree, sym: Symbol, pre: Type, explanation: String): Tree = {
- val realsym = underlying(sym)
-
- errorTree(tree, realsym + realsym.locationString + " cannot be accessed in " +
- (if (sym.isClassConstructor) context.enclClass.owner else pre.widen) +
- explanation)
- }
-
/* -- Tests & Checks---------------------------------------------------- */
/** Check that <code>sym</code> is defined and accessible as a member of
@@ -258,16 +250,19 @@ trait Infer {
if (context.unit != null)
context.unit.depends += sym.toplevelClass
- val sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
+ var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
// Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG
+ if (sym1 == NoSymbol && sym.isJavaDefined && context.unit.isJava) // don't try to second guess Java; see #4402
+ sym1 = sym
+
if (sym1 == NoSymbol) {
if (settings.debug.value) {
Console.println(context)
Console.println(tree)
Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
}
- accessError(tree, sym, pre,
+ new AccessError(tree, sym, pre,
if (settings.check.isDefault) {
analyzer.lastAccessCheckDetails
} else {
@@ -295,10 +290,10 @@ trait Infer {
if (settings.debug.value) ex.printStackTrace
val sym2 = underlying(sym1)
val itype = pre.memberType(sym2)
- accessError(tree, sym, pre,
+ new AccessError(tree, sym, pre,
"\n because its instance type "+itype+
(if ("malformed type: "+itype.toString==ex.msg) " is malformed"
- else " contains a "+ex.msg))
+ else " contains a "+ex.msg)).emit()
ErrorType
}
if (pre.isInstanceOf[SuperType])
@@ -1697,6 +1692,21 @@ trait Infer {
// Side effects tree with symbol and type
tree setSymbol resSym setType resTpe
}
+
+ case class AccessError(tree: Tree, sym: Symbol, pre: Type, explanation: String) extends Tree {
+ override def pos = tree.pos
+ override def hasSymbol = tree.hasSymbol
+ override def symbol = tree.symbol
+ override def symbol_=(x: Symbol) = tree.symbol = x
+ setError(this)
+
+ def emit(): Tree = {
+ val realsym = underlying(sym)
+ errorTree(tree, realsym + realsym.locationString + " cannot be accessed in " +
+ (if (sym.isClassConstructor) context.enclClass.owner else pre.widen) +
+ explanation)
+ }
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index f966e1476c..3b3eb74745 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -557,7 +557,7 @@ trait Namers { self: Analyzer =>
}
case _ =>
}
- sym.setInfo(tp)
+ sym.setInfo(if (sym.isJavaDefined) RestrictJavaArraysMap(tp) else tp)
if ((sym.isAliasType || sym.isAbstractType) && !sym.isParameter &&
!typer.checkNonCyclic(tree.pos, tp))
sym.setInfo(ErrorType) // this early test is there to avoid infinite baseTypes when
@@ -1279,6 +1279,20 @@ trait Namers { self: Analyzer =>
}
}
+ /** Convert Java generic array type T[] to (T with Object)[]
+ * (this is necessary because such arrays have a representation which is incompatible
+ * with arrays of primitive types.)
+ */
+ private object RestrictJavaArraysMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, ArrayClass, List(elemtp))
+ if elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe) =>
+ TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, definitions.ObjectClass.tpe))))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
/** Check that symbol's definition is well-formed. This means:
* - no conflicting modifiers
* - `abstract' modifier only for classes
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 3afbc5640b..fb71c8caae 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -183,7 +183,7 @@ trait Typers extends Modes {
namerCache
}
- private[typechecker] var context = context0
+ var context = context0
def context1 = context
/** Check that <code>tree</code> is a stable expression.
@@ -991,6 +991,32 @@ trait Typers extends Modes {
doAdapt(pt)
}
+ /** Try to apply an implicit conversion to `qual' so that it contains
+ * a method `name`. If that's ambiguous try taking arguments into account using `adaptToArguments`.
+ */
+ def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int): Tree = {
+ try {
+ adaptToMember(qual, HasMember(name))
+ } catch {
+ case ex: TypeError =>
+ // this happens if implicits are ambiguous; try again with more context info.
+ // println("last ditch effort: "+qual+" . "+name)
+ context.tree match {
+ case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => // try handling the arguments
+ // println("typing args: "+args)
+ silent(_.typedArgs(args, mode)) match {
+ case args: List[_] =>
+ adaptToArguments(qual, name, args.asInstanceOf[List[Tree]], WildcardType)
+ case _ =>
+ throw ex
+ }
+ case _ =>
+ // println("not in an apply: "+context.tree+"/"+tree)
+ throw ex
+ }
+ }
+ }
+
 /** Try to apply an implicit conversion to `qual' so that it contains a
* member `name` of arbitrary type.
* If no conversion is found, return `qual' unchanged.
@@ -2712,10 +2738,13 @@ trait Typers extends Modes {
error(t.pos, "unexpected tree after typing annotation: "+ typedAnn)
}
- if (annType.typeSymbol == DeprecatedAttr && (argss.isEmpty || argss.head.isEmpty))
- unit.deprecationWarning(ann.pos,
- "the `deprecated' annotation now takes a (message: String) as parameter\n"+
- "indicating the reason for deprecation. That message is printed to the console and included in scaladoc.")
+ if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
+ unit.deprecationWarning(ann.pos, """
+ |The `deprecated` annotation now takes two String parameters: the first is
+ |an explanation and/or recommended alternative, which will be printed to the
+ |console and also appear in the scaladoc. The second is the first released
+ |version in which the member was deprecated.""".trim.stripMargin
+ )
if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) annotationError
else annInfo(typedAnn)
@@ -3452,8 +3481,6 @@ trait Typers extends Modes {
* @return ...
*/
def typedSelect(qual: Tree, name: Name): Tree = {
-
-
val sym =
if (tree.symbol != NoSymbol) {
if (phase.erasedTypes && qual.isInstanceOf[Super])
@@ -3472,26 +3499,9 @@ trait Typers extends Modes {
member(qual, name)
}
if (sym == NoSymbol && name != nme.CONSTRUCTOR && (mode & EXPRmode) != 0) {
- val qual1 = try {
- adaptToName(qual, name)
- } catch {
- case ex: TypeError =>
- // this happens if implicits are ambiguous; try again with more context info.
- // println("last ditch effort: "+qual+" . "+name)
- context.tree match {
- case Apply(tree1, args) if tree1 eq tree => // try handling the arguments
- // println("typing args: "+args)
- silent(_.typedArgs(args, mode)) match {
- case args: List[_] =>
- adaptToArguments(qual, name, args.asInstanceOf[List[Tree]], WildcardType)
- case _ =>
- throw ex
- }
- case _ =>
- // println("not in an apply: "+context.tree+"/"+tree)
- throw ex
- }
- }
+ val qual1 =
+ if (member(qual, name) != NoSymbol) qual
+ else adaptToMemberWithArgs(tree, qual, name, mode)
if (qual1 ne qual) return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
@@ -3567,6 +3577,12 @@ trait Typers extends Modes {
qual // you only get to see the wrapped tree after running this check :-p
}) setType qual.tpe,
name)
+ case accErr: Inferencer#AccessError =>
+ val qual1 =
+ try adaptToMemberWithArgs(tree, qual, name, mode)
+ catch { case _: TypeError => qual }
+ if (qual1 ne qual) typed(Select(qual1, name) setPos tree.pos, mode, pt)
+ else accErr.emit()
case _ =>
result
}
@@ -3713,10 +3729,10 @@ trait Typers extends Modes {
if (inaccessibleSym eq NoSymbol) {
error(tree.pos, "not found: "+decodeWithKind(name, context.owner))
}
- else accessError(
+ else new AccessError(
tree, inaccessibleSym, context.enclClass.owner.thisType,
inaccessibleExplanation
- )
+ ).emit()
defSym = context.owner.newErrorSymbol(name)
}
}
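
The new warning text above corresponds to the two-parameter form of @deprecated that the rest of this commit applies throughout the library: a message (explanation and/or recommended alternative) plus the version in which the member was first deprecated. A small sketch, with hypothetical member names but the annotation shape used in this commit:

    object DeprecationExample {
      def newThing(x: Int): Int = x

      @deprecated("use `newThing` instead", "2.9.0")
      def oldThing(x: Int): Int = newThing(x)
    }

The old single-argument form still compiles but now triggers the deprecation warning shown above.
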
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
index 86b9ca5cda..b6e61a4014 100644
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ b/src/compiler/scala/tools/nsc/util/Statistics.scala
@@ -159,6 +159,7 @@ object Statistics {
val implicitCacheHits = new Counter
val implicitCacheMisses = new Counter
val improvesCount = new Counter
+ val improvesCachedCount = new Counter
val subtypeAppInfos = new SubCounter(subtypeCount)
val subtypeImprovCount = new SubCounter(subtypeCount)
val subtypeETNanos = new Timer
@@ -260,6 +261,7 @@ abstract class Statistics {
inform("#implicit searches : " + implicitSearchCount)
inform("#tried, plausible, matching, typed, found implicits: "+triedImplicits+", "+plausiblyCompatibleImplicits+", "+matchingImplicits+", "+typedImplicits+", "+foundImplicits)
inform("#implicit improves tests : " + improvesCount)
+ inform("#implicit improves cached: " + improvesCachedCount)
inform("#implicit inscope hits : " + inscopeImplicitHits)
inform("#implicit oftype hits : " + oftypeImplicitHits)
}
diff --git a/src/jline/pom.xml b/src/jline/pom.xml
deleted file mode 100644
index 08a59b054d..0000000000
--- a/src/jline/pom.xml
+++ /dev/null
@@ -1,273 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.sonatype.forge</groupId>
- <artifactId>forge-parent</artifactId>
- <version>6</version>
- </parent>
-
- <groupId>org.sonatype.jline</groupId>
- <artifactId>jline</artifactId>
- <name>JLine</name>
- <version>2.6-SNAPSHOT</version>
-
- <organization>
- <name>Sonatype</name>
- <url>http://sonatype.org</url>
- </organization>
-
- <licenses>
- <license>
- <name>The BSD License</name>
- <url>http://www.opensource.org/licenses/bsd-license.php</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <scm>
- <connection>scm:git:git://github.com/jdillon/jline2.git</connection>
- <developerConnection>scm:git:ssh://git@github.com/jdillon/jline2.git</developerConnection>
- <url>http://github.com/jdillon/jline2</url>
- </scm>
-
- <ciManagement>
- <system>Hudson</system>
- <url>https://grid.sonatype.org/ci/job/JLine2</url>
- </ciManagement>
-
- <developers>
- <developer>
- <id>jdillon</id>
- <name>Jason Dillon</name>
- <email>jason@planet57.com</email>
- <roles>
- <role>Build Master</role>
- <role>Developer</role>
- </roles>
- </developer>
- </developers>
-
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- </properties>
-
- <!--
- <repositories>
- <repository>
- <id>jansi</id>
- <url>http://jansi.fusesource.org/repo/snapshot</url>
- <releases>
- <enabled>false</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- </repository>
- </repositories>
- -->
-
- <dependencies>
- <dependency>
- <groupId>org.fusesource.jansi</groupId>
- <artifactId>jansi</artifactId>
- <version>1.4</version>
- <!--<scope>provided</scope>-->
- </dependency>
-
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>4.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <defaultGoal>install</defaultGoal>
-
- <resources>
- <resource>
- <directory>${project.basedir}/src/main/resources</directory>
- <filtering>false</filtering>
- <includes>
- <include>**/*</include>
- </includes>
- </resource>
-
- <resource>
- <directory>${project.basedir}/src/main/filtered-resources</directory>
- <filtering>true</filtering>
- <includes>
- <include>**/*</include>
- </includes>
- </resource>
- </resources>
-
- <testResources>
- <testResource>
- <directory>${project.basedir}/src/test/resources</directory>
- <filtering>false</filtering>
- <includes>
- <include>**/*</include>
- </includes>
- </testResource>
-
- <testResource>
- <directory>${project.basedir}/src/test/filtered-resources</directory>
- <filtering>true</filtering>
- <includes>
- <include>**/*</include>
- </includes>
- </testResource>
- </testResources>
-
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.6</version>
- <configuration>
- <redirectTestOutputToFile>true</redirectTestOutputToFile>
- <forkMode>once</forkMode>
- <argLine>-ea</argLine>
- <failIfNoTests>false</failIfNoTests>
- <workingDirectory>${project.build.directory}</workingDirectory>
- <excludes>
- <exclude>**/Abstract*.java</exclude>
- <exclude>**/Test*.java</exclude>
- </excludes>
- <includes>
- <include>**/*Test.java</include>
- </includes>
- </configuration>
- </plugin>
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>2.3.2</version>
- <configuration>
- <source>1.5</source>
- <target>1.5</target>
- </configuration>
- </plugin>
-
- <plugin>
- <groupId>org.apache.felix</groupId>
- <artifactId>maven-bundle-plugin</artifactId>
- <version>2.1.0</version>
- <executions>
- <execution>
- <phase>process-classes</phase>
- <goals>
- <goal>manifest</goal>
- </goals>
- <configuration>
- <instructions>
- <Import-Package>!scala.tools.jline*,javax.swing;resolution:=optional,*</Import-Package>
- <DynamicImport-Package>*</DynamicImport-Package>
- </instructions>
- </configuration>
- </execution>
- </executions>
- </plugin>
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <version>2.3.1</version>
- <configuration>
- <archive>
- <manifestFile>${project.build.outputDirectory}/META-INF/MANIFEST.MF</manifestFile>
- </archive>
- </configuration>
- <executions>
- <execution>
- <goals>
- <goal>test-jar</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-scm-plugin</artifactId>
- <version>1.4</version>
- </plugin>
-
- <!-- include all the dependencies into the jar so it can run standalone -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-shade-plugin</artifactId>
- <version>1.4</version>
- <executions>
- <execution>
- <goals>
- <goal>shade</goal>
- </goals>
- <configuration>
- <artifactSet>
- <excludes>
- <exclude>junit:junit</exclude>
- </excludes>
- </artifactSet>
- <filters>
- <filter>
- <artifact>org.fusesource.jansi:jansi</artifact>
- <excludes>
- <exclude>META-INF/maven/**</exclude>
- <exclude>*.txt</exclude>
- <exclude>junit/**</exclude>
- <exclude>org/junit/**</exclude>
- <exclude>org/hamcrest/**</exclude>
- <exclude>org/fusesource/hawtjni/runtime/Jni*</exclude>
- <exclude>org/fusesource/hawtjni/runtime/*Flag*</exclude>
- <exclude>org/fusesource/hawtjni/runtime/T32*</exclude>
- <exclude>org/fusesource/hawtjni/runtime/NativeStats*</exclude>
- </excludes>
- </filter>
- </filters>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-
- <profiles>
- <profile>
- <id>retro</id>
- <activation>
- <property>
- <name>retro</name>
- <value>true</value>
- </property>
- </activation>
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>retrotranslator-maven-plugin</artifactId>
- <version>1.0-alpha-4</version>
- <executions>
- <execution>
- <goals>
- <goal>translate-project</goal>
- </goals>
- <configuration>
- <classifier>jdk14</classifier>
- <attach>true</attach>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
- </profiles>
-
-</project> \ No newline at end of file
diff --git a/src/jline/project/build.properties b/src/jline/project/build.properties
index 3ecffcd808..89d3b4a498 100644
--- a/src/jline/project/build.properties
+++ b/src/jline/project/build.properties
@@ -1,8 +1,8 @@
#Project properties
+#Wed Mar 23 21:05:24 PDT 2011
project.organization=org.improving
project.name=jline
-sbt.version=0.7.5.RC0
+sbt.version=0.7.6.RC0
project.version=0.98
build.scala.versions=2.8.1
-/*build.scala.versions=2.9.0-SNAPSHOT*/
project.initialize=false
diff --git a/src/jline/project/plugins/project/build.properties b/src/jline/project/plugins/project/build.properties
index 0b7014c531..24481fef8e 100644
--- a/src/jline/project/plugins/project/build.properties
+++ b/src/jline/project/plugins/project/build.properties
@@ -1,3 +1,3 @@
#Project properties
-#Thu Feb 10 14:58:03 PST 2011
+#Tue Apr 05 12:32:56 PDT 2011
plugin.uptodate=true
diff --git a/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java b/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java
index 1ca12cb73f..03004fea68 100644
--- a/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java
+++ b/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java
@@ -24,6 +24,10 @@ public abstract class TerminalSupport
implements Terminal
{
public static String DEFAULT_KEYBINDINGS_PROPERTIES = "keybindings.properties";
+ public static String DEFAULT_KEYBINDINGS_PROPERTIES_MAC = "macbindings.properties";
+ public static boolean isMac() {
+ return System.getProperty("os.name").toLowerCase().startsWith("mac");
+ }
public static final String JLINE_SHUTDOWNHOOK = "jline.shutdownhook";
@@ -157,7 +161,11 @@ public abstract class TerminalSupport
}
public InputStream getDefaultBindings() {
- return TerminalSupport.class.getResourceAsStream(DEFAULT_KEYBINDINGS_PROPERTIES);
+ // Mac bindings are slightly different from Unix/Linux.
+ // For instance, the Delete key behavior is different between them.
+ return TerminalSupport.class.getResourceAsStream(
+ isMac() ? DEFAULT_KEYBINDINGS_PROPERTIES_MAC : DEFAULT_KEYBINDINGS_PROPERTIES
+ );
}
//
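
The resource selection above keys off the os.name system property. A rough Scala equivalent of the same check, as a sketch only (not part of the patch and not jline's API surface):

    // Mirrors the isMac() test and the bindings-resource choice above.
    def defaultBindingsResource: String = {
      val isMac = System.getProperty("os.name").toLowerCase.startsWith("mac")
      if (isMac) "macbindings.properties" else "keybindings.properties"
    }
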
diff --git a/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java b/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java
index 10c98888d3..71659c5a42 100644
--- a/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java
+++ b/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java
@@ -166,6 +166,16 @@ public class UnixTerminal
return DELETE.code;
}
}
+ else if (c == 'b') { // alt-b: go back a word
+ return CTRL_O.code; // PREV_WORD
+ }
+ else if (c == 'f') { // alt-f: go forward a word
+ return CTRL_T.code; // NEXT_WORD
+ }
+ else if (key == DEL) { // alt-backspace: delete previous word
+ return CTRL_W.code; // DELETE_PREV_WORD
+ }
+
}
// handle unicode characters, thanks for a patch from amyi@inf.ed.ac.uk
@@ -205,7 +215,10 @@ public class UnixTerminal
DEL_THIRD(51),
- DEL_SECOND(126),;
+ DEL_SECOND(126),
+
+ DEL(127);
+
public final short code;
diff --git a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
index 861c2d58bd..5e6f5c166a 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
@@ -85,7 +85,7 @@ public class ConsoleReader
{
this.in = in;
this.terminal = term != null ? term : TerminalFactory.get();
- this.out = new PrintWriter(terminal.wrapOutIfNeeded(out));
+ this.out = new PrintWriter(getTerminal().wrapOutIfNeeded(out));
this.keyBindings = loadKeyBindings(bindings);
setBellEnabled(!Configuration.getBoolean(JLINE_NOBELL, false));
@@ -555,9 +555,9 @@ public class ConsoleReader
if (mask != null) {
Arrays.fill(chars, mask);
}
- if (terminal.hasWeirdWrap()) {
+ if (getTerminal().hasWeirdWrap()) {
// need to determine if wrapping will occur:
- int width = terminal.getWidth();
+ int width = getTerminal().getWidth();
int pos = getCursorPosition();
for (int i = 0; i < chars.length; i++) {
print(chars[i]);
@@ -570,7 +570,7 @@ public class ConsoleReader
print(chars);
}
clearAhead(clear, chars.length);
- if (terminal.isAnsiSupported()) {
+ if (getTerminal().isAnsiSupported()) {
if (chars.length > 0) {
back(chars.length);
}
@@ -578,8 +578,8 @@ public class ConsoleReader
back(chars.length);
}
}
- if (terminal.hasWeirdWrap()) {
- int width = terminal.getWidth();
+ if (getTerminal().hasWeirdWrap()) {
+ int width = getTerminal().getWidth();
// best guess on whether the cursor is in that weird location...
// Need to do this without calling ansi cursor location methods
// otherwise it breaks paste of wrapped lines in xterm.
@@ -614,8 +614,8 @@ public class ConsoleReader
return;
}
- if (terminal.isAnsiSupported()) {
- int width = terminal.getWidth();
+ if (getTerminal().isAnsiSupported()) {
+ int width = getTerminal().getWidth();
int screenCursorCol = getCursorPosition() + delta;
// clear current line
printAnsiSequence("K");
@@ -652,7 +652,7 @@ public class ConsoleReader
*/
protected void back(final int num) throws IOException {
if (num == 0) return;
- if (terminal.isAnsiSupported()) {
+ if (getTerminal().isAnsiSupported()) {
int width = getTerminal().getWidth();
int cursor = getCursorPosition();
int realCursor = cursor + num;
@@ -700,7 +700,7 @@ public class ConsoleReader
count = moveCursor(-1 * num) * -1;
buf.buffer.delete(buf.cursor, buf.cursor + count);
if (getCursorPosition() / termwidth != lines) {
- if (terminal.isAnsiSupported()) {
+ if (getTerminal().isAnsiSupported()) {
// debug("doing backspace redraw: " + getCursorPosition() + " on " + termwidth + ": " + lines);
printAnsiSequence("K");
// if cursor+num wraps, then we need to clear the line(s) below too
@@ -828,7 +828,7 @@ public class ConsoleReader
// + buf.cursor + " => " + (buf.cursor + where) + ")");
buf.cursor += where;
- if (terminal.isAnsiSupported()) {
+ if (getTerminal().isAnsiSupported()) {
if (where < 0) {
back(Math.abs(where));
} else {
@@ -837,12 +837,12 @@ public class ConsoleReader
int oldLine = (cursor - where) / width;
int newLine = cursor / width;
if (newLine > oldLine) {
- if (terminal.hasWeirdWrap()) {
+ if (getTerminal().hasWeirdWrap()) {
// scroll up if at bottom
// note:
- // on rxvt cywgin terminal.getHeight() is incorrect
+ // on rxvt cygwin getTerminal().getHeight() is incorrect
// MacOs xterm does not seem to support scrolling
- if (getCurrentAnsiRow() == terminal.getHeight()) {
+ if (getCurrentAnsiRow() == getTerminal().getHeight()) {
printAnsiSequence((newLine - oldLine) + "S");
}
}
@@ -918,7 +918,7 @@ public class ConsoleReader
* @return the character, or -1 if an EOF is received.
*/
public final int readVirtualKey() throws IOException {
- int c = terminal.readVirtualKey(in);
+ int c = getTerminal().readVirtualKey(in);
Log.trace("Keystroke: ", c);
@@ -933,7 +933,7 @@ public class ConsoleReader
*/
private int clearEcho(final int c) throws IOException {
// if the terminal is not echoing, then ignore
- if (!terminal.isEchoEnabled()) {
+ if (!getTerminal().isEchoEnabled()) {
return 0;
}
@@ -1171,7 +1171,7 @@ public class ConsoleReader
}
try {
- if (!terminal.isSupported()) {
+ if (!getTerminal().isSupported()) {
beforeReadLine(prompt, mask);
}
@@ -1181,7 +1181,7 @@ public class ConsoleReader
}
// if the terminal is unsupported, just use plain-java reading
- if (!terminal.isSupported()) {
+ if (!getTerminal().isSupported()) {
return readLine(in);
}
@@ -1283,7 +1283,7 @@ public class ConsoleReader
if (buf.buffer.length() == 0) {
return null;
} else {
- deleteCurrentCharacter();
+ success = deleteCurrentCharacter();
}
break;
@@ -1428,7 +1428,7 @@ public class ConsoleReader
}
}
finally {
- if (!terminal.isSupported()) {
+ if (!getTerminal().isSupported()) {
afterReadLine();
}
}
@@ -1756,7 +1756,7 @@ public class ConsoleReader
* Clear the screen by issuing the ANSI "clear screen" code.
*/
public boolean clearScreen() throws IOException {
- if (!terminal.isAnsiSupported()) {
+ if (!getTerminal().isAnsiSupported()) {
return false;
}
@@ -2109,7 +2109,7 @@ public class ConsoleReader
// return column position, reported by the terminal
private int getCurrentPosition() {
// check for ByteArrayInputStream to disable for unit tests
- if (terminal.isAnsiSupported() && !(in instanceof ByteArrayInputStream)) {
+ if (getTerminal().isAnsiSupported() && !(in instanceof ByteArrayInputStream)) {
try {
printAnsiSequence("6n");
flush();
@@ -2136,7 +2136,7 @@ public class ConsoleReader
// wrapping terminals - not tested for anything else
private int getCurrentAnsiRow() {
// check for ByteArrayInputStream to disable for unit tests
- if (terminal.isAnsiSupported() && !(in instanceof ByteArrayInputStream)) {
+ if (getTerminal().isAnsiSupported() && !(in instanceof ByteArrayInputStream)) {
try {
printAnsiSequence("6n");
flush();
diff --git a/src/jline/src/main/java/scala/tools/jline/console/Key.java b/src/jline/src/main/java/scala/tools/jline/console/Key.java
index 5c13d19860..26528555df 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/Key.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/Key.java
@@ -40,8 +40,14 @@ public enum Key
CTRL_N(14),
+ CTRL_O(15),
+
CTRL_P(16),
+ CTRL_T(20),
+
+ CTRL_W(23),
+
CTRL_OB(27),
CTRL_QM(127),
diff --git a/src/jline/src/main/resources/scala/tools/jline/keybindings.properties b/src/jline/src/main/resources/scala/tools/jline/keybindings.properties
index 610a1626aa..ebb5c8818a 100644
--- a/src/jline/src/main/resources/scala/tools/jline/keybindings.properties
+++ b/src/jline/src/main/resources/scala/tools/jline/keybindings.properties
@@ -51,6 +51,9 @@
# CTRL-R: redraw the current line
18=SEARCH_PREV
+# CTRL-T: move to next word
+20=NEXT_WORD
+
# CTRL-U: delete all the characters before the cursor position
21=KILL_LINE_PREV
diff --git a/src/jline/src/main/resources/scala/tools/jline/macbindings.properties b/src/jline/src/main/resources/scala/tools/jline/macbindings.properties
new file mode 100644
index 0000000000..8e810a8364
--- /dev/null
+++ b/src/jline/src/main/resources/scala/tools/jline/macbindings.properties
@@ -0,0 +1,62 @@
+# Keybinding mapping for JLine. The format is:
+# [key code]: [logical operation]
+
+# CTRL-B: move to the previous character
+2: PREV_CHAR
+
+# CTRL-G: move to the previous word
+7: PREV_WORD
+
+# CTRL-F: move to the next character
+6: NEXT_CHAR
+
+# CTRL-A: move to the beginning of the line
+1: MOVE_TO_BEG
+
+# CTRL-D: close out the input stream
+4: EXIT
+
+# CTRL-E: move the cursor to the end of the line
+5: MOVE_TO_END
+
+# BACKSPACE, CTRL-H: delete the previous character
+# 8 is the ASCII code for backspace and therefore
+# deletes the previous character
+8: DELETE_PREV_CHAR
+
+# TAB, CTRL-I: signal that console completion should be attempted
+9: COMPLETE
+
+# CTRL-J, CTRL-M: newline
+10: NEWLINE
+
+# CTRL-K: erase the current line
+11: KILL_LINE
+
+# ENTER: newline
+13: NEWLINE
+
+# CTRL-L: clear screen
+12: CLEAR_SCREEN
+
+# CTRL-N: scroll to the next element in the history buffer
+14: NEXT_HISTORY
+
+# CTRL-P: scroll to the previous element in the history buffer
+16: PREV_HISTORY
+
+# CTRL-R: redraw the current line
+18: REDISPLAY
+
+# CTRL-U: delete all the characters before the cursor position
+21: KILL_LINE_PREV
+
+# CTRL-V: paste the contents of the clipboard (useful for Windows terminal)
+22: PASTE
+
+# CTRL-W: delete the word directly before the cursor
+23: DELETE_PREV_WORD
+
+# DELETE, CTRL-?: delete the next character
+# 127 is the ASCII code for delete
+127: DELETE_NEXT_CHAR
diff --git a/src/library/scala/Application.scala b/src/library/scala/Application.scala
index 2060b509b8..d3c024b38e 100644
--- a/src/library/scala/Application.scala
+++ b/src/library/scala/Application.scala
@@ -67,7 +67,7 @@ import scala.compat.Platform.currentTime
* @author Matthias Zenger
* @version 1.0, 10/09/2003
*/
-@deprecated("use App instead")
+@deprecated("use App instead", "2.9.0")
trait Application {
/** The time when the execution of this program started, in milliseconds since 1
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 1bdc5b23c9..66ce9320bd 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -423,7 +423,7 @@ object Array extends FallbackArrayBuilding {
* @param elem the element composing the resulting array
* @return an array composed of n elements all equal to elem
*/
- @deprecated("use `Array.fill' instead")
+ @deprecated("use `Array.fill' instead", "2.8.0")
def make[T: ClassManifest](n: Int, elem: T): Array[T] = {
val a = new Array[T](n)
var i = 0
@@ -437,7 +437,7 @@ object Array extends FallbackArrayBuilding {
/** Creates an array containing the values of a given function `f`
* over given range `[0..n)`
*/
- @deprecated("use `Array.tabulate' instead")
+ @deprecated("use `Array.tabulate' instead", "2.8.0")
def fromFunction[T: ClassManifest](f: Int => T)(n: Int): Array[T] = {
val a = new Array[T](n)
var i = 0
@@ -451,28 +451,28 @@ object Array extends FallbackArrayBuilding {
/** Creates an array containing the values of a given function `f`
* over given range `[0..n1, 0..n2)`
*/
- @deprecated("use `Array.tabulate' instead")
+ @deprecated("use `Array.tabulate' instead", "2.8.0")
def fromFunction[T: ClassManifest](f: (Int, Int) => T)(n1: Int, n2: Int): Array[Array[T]] =
fromFunction(i => fromFunction(f(i, _))(n2))(n1)
/** Creates an array containing the values of a given function `f`
* over given range `[0..n1, 0..n2, 0..n3)`
*/
- @deprecated("use `Array.tabulate' instead")
+ @deprecated("use `Array.tabulate' instead", "2.8.0")
def fromFunction[T: ClassManifest](f: (Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
fromFunction(i => fromFunction(f(i, _, _))(n2, n3))(n1)
/** Creates an array containing the values of a given function `f`
* over given range `[0..n1, 0..n2, 0..n3, 0..n4)`
*/
- @deprecated("use `Array.tabulate' instead")
+ @deprecated("use `Array.tabulate' instead", "2.8.0")
def fromFunction[T: ClassManifest](f: (Int, Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
fromFunction(i => fromFunction(f(i, _, _, _))(n2, n3, n4))(n1)
/** Creates an array containing the values of a given function `f`
* over given range `[0..n1, 0..n2, 0..n3, 0..n4, 0..n5)`
*/
- @deprecated("use `Array.tabulate' instead")
+ @deprecated("use `Array.tabulate' instead", "2.8.0")
def fromFunction[T: ClassManifest](f: (Int, Int, Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
fromFunction(i => fromFunction(f(i, _, _, _, _))(n2, n3, n4, n5))(n1)
}
@@ -486,56 +486,56 @@ object Array extends FallbackArrayBuilding {
final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable {
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead")
+ @deprecated("use `Array.ofDim' instead", "2.8.0")
def this(dim1: Int, dim2: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead")
+ @deprecated("use `Array.ofDim' instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead")
+ @deprecated("use `Array.ofDim' instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead")
+ @deprecated("use `Array.ofDim' instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int) = {
this(dim1);
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead")
+ @deprecated("use `Array.ofDim' instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead")
+ @deprecated("use `Array.ofDim' instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int, dim7: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead")
+ @deprecated("use `Array.ofDim' instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int, dim7: Int, dim8: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead")
+ @deprecated("use `Array.ofDim' instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int, dim7: Int, dim8: Int, dim9: Int) = {
this(dim1)
throw new Error()
diff --git a/src/library/scala/Cell.scala b/src/library/scala/Cell.scala
index 018710f054..f4fc3e3e77 100644
--- a/src/library/scala/Cell.scala
+++ b/src/library/scala/Cell.scala
@@ -17,5 +17,5 @@ package scala
* @author Martin Odersky
* @version 1.0, 08/08/2003
*/
-@deprecated("use `scala.Option` or `scala.Some` instead")
+@deprecated("use `scala.Option` or `scala.Some` instead", "2.9.0")
case class Cell[+T](elem: T)
diff --git a/src/library/scala/CountedIterator.scala b/src/library/scala/CountedIterator.scala
index 0d74ab3820..6f2c597169 100644
--- a/src/library/scala/CountedIterator.scala
+++ b/src/library/scala/CountedIterator.scala
@@ -14,7 +14,7 @@ package scala
*
* @since 2.0
*/
-@deprecated("use iterator.zipWithIndex instead")
+@deprecated("use iterator.zipWithIndex instead", "2.8.0")
trait CountedIterator[+A] extends Iterator[A] {
/** counts the elements in this iterator; counts start at 0
*/
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index 6fd7b0984b..5f2e01063f 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -129,7 +129,7 @@ object Double extends AnyValCompanion {
final val PositiveInfinity = java.lang.Double.POSITIVE_INFINITY
final val NegativeInfinity = java.lang.Double.NEGATIVE_INFINITY
- @deprecated("use Double.MinPositiveValue instead")
+ @deprecated("use Double.MinPositiveValue instead", "2.9.0")
final val Epsilon = MinPositiveValue
/** The negative number with the greatest (finite) absolute value which is representable
diff --git a/src/library/scala/Either.scala b/src/library/scala/Either.scala
index 2fe81c0d84..29b75a4c82 100644
--- a/src/library/scala/Either.scala
+++ b/src/library/scala/Either.scala
@@ -311,11 +311,11 @@ object Either {
}
}
- @deprecated("use `x.joinLeft'")
+ @deprecated("use `x.joinLeft'", "2.8.0")
def joinLeft[A, B](es: Either[Either[A, B], B]) =
es.left.flatMap(x => x)
- @deprecated("use `x.joinRight'")
+ @deprecated("use `x.joinRight'", "2.8.0")
def joinRight[A, B](es: Either[A, Either[A, B]]) =
es.right.flatMap(x => x)
@@ -323,7 +323,7 @@ object Either {
* Takes an `Either` to its contained value within `Left` or
* `Right`.
*/
- @deprecated("use `x.merge'")
+ @deprecated("use `x.merge'", "2.8.0")
def merge[T](e: Either[T, T]) = e match {
case Left(t) => t
case Right(t) => t
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index 38819e5d16..f403e5ccab 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -129,7 +129,7 @@ object Float extends AnyValCompanion {
final val PositiveInfinity = java.lang.Float.POSITIVE_INFINITY
final val NegativeInfinity = java.lang.Float.NEGATIVE_INFINITY
- @deprecated("use Float.MinPositiveValue instead")
+ @deprecated("use Float.MinPositiveValue instead", "2.9.0")
final val Epsilon = MinPositiveValue
/** The negative number with the greatest (finite) absolute value which is representable
diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala
index ff0769b84b..a6e2a2d056 100644
--- a/src/library/scala/Function.scala
+++ b/src/library/scala/Function.scala
@@ -50,7 +50,7 @@ object Function {
* @param f ...
* @return ...
*/
- @deprecated("Use `f.curried` instead")
+ @deprecated("Use `f.curried` instead", "2.8.0")
def curried[a1, a2, b](f: (a1, a2) => b): a1 => a2 => b = {
x1 => x2 => f(x1, x2)
}
@@ -60,21 +60,21 @@ object Function {
* @param f ...
* @return ...
*/
- @deprecated("Use `f.curried` instead")
+ @deprecated("Use `f.curried` instead", "2.8.0")
def curried[a1, a2, a3, b](f: (a1, a2, a3) => b): a1 => a2 => a3 => b = {
x1 => x2 => x3 => f(x1, x2, x3)
}
/** Currying for functions of arity 4.
*/
- @deprecated("Use `f.curried` instead")
+ @deprecated("Use `f.curried` instead", "2.8.0")
def curried[a1, a2, a3, a4, b](f: (a1, a2, a3, a4) => b): a1 => a2 => a3 => a4 => b = {
x1 => x2 => x3 => x4 => f(x1, x2, x3, x4)
}
/** Currying for functions of arity 5.
*/
- @deprecated("Use `f.curried` instead")
+ @deprecated("Use `f.curried` instead", "2.8.0")
def curried[a1, a2, a3, a4, a5, b](f: (a1, a2, a3, a4, a5) => b): a1 => a2 => a3 => a4 => a5 => b = {
x1 => x2 => x3 => x4 => x5 => f(x1, x2, x3, x4, x5)
}
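
These hunks (and the `curry` deprecations in the Function1..22 files below) steer users from the object-level Function.curried helpers toward the curried method on function values. A quick usage contrast; the add function here is made up for illustration:

    object CurriedExample extends App {
      val add = (a: Int, b: Int) => a + b
      val addCurried: Int => Int => Int = add.curried   // preferred spelling
      println(addCurried(1)(2))                         // prints 3
      // Function.curried(add) and add.curry still compile, but now warn as deprecated.
    }
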
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index f8e68a2e04..a3220f9752 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Wed Mar 30 13:47:19 PDT 2011
+// genprod generated these sources at: Thu Apr 14 13:08:25 PDT 2011
package scala
diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala
index c4b8764d58..632883f518 100644
--- a/src/library/scala/Function10.scala
+++ b/src/library/scala/Function10.scala
@@ -26,7 +26,7 @@ trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 10 arguments,
diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala
index 48ba6e2737..1c27ac757c 100644
--- a/src/library/scala/Function11.scala
+++ b/src/library/scala/Function11.scala
@@ -26,7 +26,7 @@ trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] ex
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 11 arguments,
diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala
index bb25c62ceb..7f53d87b33 100644
--- a/src/library/scala/Function12.scala
+++ b/src/library/scala/Function12.scala
@@ -26,7 +26,7 @@ trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 12 arguments,
diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala
index 19cdf0103c..9e05c6c69a 100644
--- a/src/library/scala/Function13.scala
+++ b/src/library/scala/Function13.scala
@@ -26,7 +26,7 @@ trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 13 arguments,
diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala
index 258dc230c3..5dae9116b3 100644
--- a/src/library/scala/Function14.scala
+++ b/src/library/scala/Function14.scala
@@ -26,7 +26,7 @@ trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 14 arguments,
diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala
index 6bc63a1c24..b50974a868 100644
--- a/src/library/scala/Function15.scala
+++ b/src/library/scala/Function15.scala
@@ -26,7 +26,7 @@ trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 15 arguments,
diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala
index f969e0d8ae..2dc726126b 100644
--- a/src/library/scala/Function16.scala
+++ b/src/library/scala/Function16.scala
@@ -26,7 +26,7 @@ trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 16 arguments,
diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala
index 5218f2fce7..520c9d1288 100644
--- a/src/library/scala/Function17.scala
+++ b/src/library/scala/Function17.scala
@@ -26,7 +26,7 @@ trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 17 arguments,
diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala
index a68d5e27ae..8d72a3f487 100644
--- a/src/library/scala/Function18.scala
+++ b/src/library/scala/Function18.scala
@@ -26,7 +26,7 @@ trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 18 arguments,
diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala
index 8f22c00b76..3990bc60d6 100644
--- a/src/library/scala/Function19.scala
+++ b/src/library/scala/Function19.scala
@@ -26,7 +26,7 @@ trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 19 arguments,
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index f8dff7dc75..7d0b9a5195 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -39,7 +39,7 @@ trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @speciali
def curried: T1 => T2 => R = {
(x1: T1) => (x2: T2) => apply(x1, x2)
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 2 arguments,
diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala
index 11942c0766..94fa1cf56e 100644
--- a/src/library/scala/Function20.scala
+++ b/src/library/scala/Function20.scala
@@ -26,7 +26,7 @@ trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 20 arguments,
diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala
index cb1a38fc06..f41d8899fc 100644
--- a/src/library/scala/Function21.scala
+++ b/src/library/scala/Function21.scala
@@ -26,7 +26,7 @@ trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 21 arguments,
diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala
index b202a1f437..c1b290ff33 100644
--- a/src/library/scala/Function22.scala
+++ b/src/library/scala/Function22.scala
@@ -26,7 +26,7 @@ trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21, x22: T22) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 22 arguments,
diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala
index f67ed3cc66..1b592f5c88 100644
--- a/src/library/scala/Function3.scala
+++ b/src/library/scala/Function3.scala
@@ -26,7 +26,7 @@ trait Function3[-T1, -T2, -T3, +R] extends AnyRef { self =>
def curried: T1 => T2 => T3 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => apply(x1, x2, x3)
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 3 arguments,
diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala
index 1e798bb9c5..bd1123276b 100644
--- a/src/library/scala/Function4.scala
+++ b/src/library/scala/Function4.scala
@@ -26,7 +26,7 @@ trait Function4[-T1, -T2, -T3, -T4, +R] extends AnyRef { self =>
def curried: T1 => T2 => T3 => T4 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1, x2, x3, x4)
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 4 arguments,
diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala
index e347afa706..af83dcf303 100644
--- a/src/library/scala/Function5.scala
+++ b/src/library/scala/Function5.scala
@@ -26,7 +26,7 @@ trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self =>
def curried: T1 => T2 => T3 => T4 => T5 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1, x2, x3, x4, x5)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 5 arguments,
diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala
index 3818bac6ee..caceb55c33 100644
--- a/src/library/scala/Function6.scala
+++ b/src/library/scala/Function6.scala
@@ -26,7 +26,7 @@ trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends AnyRef { self =>
def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6) => self.apply(x1, x2, x3, x4, x5, x6)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 6 arguments,
diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala
index 6c03f51e81..16aa9a90b6 100644
--- a/src/library/scala/Function7.scala
+++ b/src/library/scala/Function7.scala
@@ -26,7 +26,7 @@ trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends AnyRef { self =>
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1, x2, x3, x4, x5, x6, x7)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 7 arguments,
diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala
index eb5065835d..6f97cc182b 100644
--- a/src/library/scala/Function8.scala
+++ b/src/library/scala/Function8.scala
@@ -26,7 +26,7 @@ trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends AnyRef { sel
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 8 arguments,
diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala
index a5a132b52a..3c39cb472b 100644
--- a/src/library/scala/Function9.scala
+++ b/src/library/scala/Function9.scala
@@ -26,7 +26,7 @@ trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends AnyRef
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)).curried
}
- @deprecated("Use 'curried' instead")
+ @deprecated("Use 'curried' instead", "2.8.0")
def curry = curried
/** Creates a tupled version of this function: instead of 9 arguments,
diff --git a/src/library/scala/Math.scala b/src/library/scala/Math.scala
index a1663a6a02..b8493185a2 100644
--- a/src/library/scala/Math.scala
+++ b/src/library/scala/Math.scala
@@ -13,83 +13,83 @@ package scala
* operations such as the elementary exponential, logarithm, square root, and
* trigonometric functions.
*/
-@deprecated("use the scala.math package object instead.\n(Example package object usage: scala.math.Pi )")
+@deprecated("use the scala.math package object instead.\n(Example package object usage: scala.math.Pi )", "2.8.0")
object Math extends MathCommon {
- @deprecated("Use scala.Byte.MinValue instead")
+ @deprecated("Use scala.Byte.MinValue instead", "2.8.0")
val MIN_BYTE = java.lang.Byte.MIN_VALUE
- @deprecated("Use scala.Byte.MaxValue instead")
+ @deprecated("Use scala.Byte.MaxValue instead", "2.8.0")
val MAX_BYTE = java.lang.Byte.MAX_VALUE
- @deprecated("Use scala.Short.MinValue instead")
+ @deprecated("Use scala.Short.MinValue instead", "2.8.0")
val MIN_SHORT = java.lang.Short.MIN_VALUE
- @deprecated("Use scala.Short.MaxValue instead")
+ @deprecated("Use scala.Short.MaxValue instead", "2.8.0")
val MAX_SHORT = java.lang.Short.MAX_VALUE
- @deprecated("Use scala.Char.MinValue instead")
+ @deprecated("Use scala.Char.MinValue instead", "2.8.0")
val MIN_CHAR = java.lang.Character.MIN_VALUE
- @deprecated("Use scala.Char.MaxValue instead")
+ @deprecated("Use scala.Char.MaxValue instead", "2.8.0")
val MAX_CHAR = java.lang.Character.MAX_VALUE
- @deprecated("Use scala.Int.MinValue instead")
+ @deprecated("Use scala.Int.MinValue instead", "2.8.0")
val MIN_INT = java.lang.Integer.MIN_VALUE
- @deprecated("Use scala.Int.MaxValue instead")
+ @deprecated("Use scala.Int.MaxValue instead", "2.8.0")
val MAX_INT = java.lang.Integer.MAX_VALUE
- @deprecated("Use scala.Long.MinValue instead")
+ @deprecated("Use scala.Long.MinValue instead", "2.8.0")
val MIN_LONG = java.lang.Long.MIN_VALUE
- @deprecated("Use scala.Long.MaxValue instead")
+ @deprecated("Use scala.Long.MaxValue instead", "2.8.0")
val MAX_LONG = java.lang.Long.MAX_VALUE
/** The smallest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.MinValue instead")
+ @deprecated("Use scala.Float.MinValue instead", "2.8.0")
val MIN_FLOAT = -java.lang.Float.MAX_VALUE
/** The smallest difference between two values of <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.MinPositiveValue instead")
+ @deprecated("Use scala.Float.MinPositiveValue instead", "2.8.0")
val EPS_FLOAT = java.lang.Float.MIN_VALUE
/** The greatest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.MaxValue instead")
+ @deprecated("Use scala.Float.MaxValue instead", "2.8.0")
val MAX_FLOAT = java.lang.Float.MAX_VALUE
/** A value of type <a href="Float.html" target="_self">scala.Float</a> that represents no number. */
- @deprecated("Use scala.Float.NaN instead")
+ @deprecated("Use scala.Float.NaN instead", "2.8.0")
val NaN_FLOAT = java.lang.Float.NaN
/** Negative infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.NegativeInfinity instead")
+ @deprecated("Use scala.Float.NegativeInfinity instead", "2.8.0")
val NEG_INF_FLOAT = java.lang.Float.NEGATIVE_INFINITY
/** Positive infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.PositiveInfinity instead")
+ @deprecated("Use scala.Float.PositiveInfinity instead", "2.8.0")
val POS_INF_FLOAT = java.lang.Float.POSITIVE_INFINITY
/** The smallest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.MinValue instead")
+ @deprecated("Use scala.Double.MinValue instead", "2.8.0")
val MIN_DOUBLE = -java.lang.Double.MAX_VALUE
/** The smallest difference between two values of <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.MinPositiveValue instead")
+ @deprecated("Use scala.Double.MinPositiveValue instead", "2.8.0")
val EPS_DOUBLE = java.lang.Double.MIN_VALUE
/** The greatest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.MaxValue instead")
+ @deprecated("Use scala.Double.MaxValue instead", "2.8.0")
val MAX_DOUBLE = java.lang.Double.MAX_VALUE
/** A value of type <a href="Double.html" target="_self">scala.Double</a> that represents no number. */
- @deprecated("Use scala.Double.NaN instead")
+ @deprecated("Use scala.Double.NaN instead", "2.8.0")
val NaN_DOUBLE = java.lang.Double.NaN
/** Negative infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.NegativeInfinity instead")
+ @deprecated("Use scala.Double.NegativeInfinity instead", "2.8.0")
val NEG_INF_DOUBLE = java.lang.Double.NEGATIVE_INFINITY
/** Positive infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.PositiveInfinity instead")
+ @deprecated("Use scala.Double.PositiveInfinity instead", "2.8.0")
val POS_INF_DOUBLE = java.lang.Double.POSITIVE_INFINITY
} \ No newline at end of file
diff --git a/src/library/scala/NotDefinedError.scala b/src/library/scala/NotDefinedError.scala
index 194d8ce626..1fd0e7ec49 100644
--- a/src/library/scala/NotDefinedError.scala
+++ b/src/library/scala/NotDefinedError.scala
@@ -13,5 +13,5 @@ package scala
/**
* @since 2.0
*/
-@deprecated("Use a custom Error class instead")
+@deprecated("Use a custom Error class instead", "2.8.0")
final class NotDefinedError(msg: String) extends Error("not defined: " + msg)
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 20473009bf..e03eab45a2 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -59,16 +59,16 @@ object Predef extends LowPriorityImplicits {
// Deprecated
- @deprecated("Use sys.error(message) instead")
+ @deprecated("Use sys.error(message) instead", "2.9.0")
def error(message: String): Nothing = sys.error(message)
- @deprecated("Use sys.exit() instead")
+ @deprecated("Use sys.exit() instead", "2.9.0")
def exit(): Nothing = sys.exit()
- @deprecated("Use sys.exit(status) instead")
+ @deprecated("Use sys.exit(status) instead", "2.9.0")
def exit(status: Int): Nothing = sys.exit(status)
- @deprecated("Use formatString.format(args: _*) or arg.formatted(formatString) instead")
+ @deprecated("Use formatString.format(args: _*) or arg.formatted(formatString) instead", "2.9.0")
def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
// errors and asserts -------------------------------------------------
@@ -358,7 +358,7 @@ object Predef extends LowPriorityImplicits {
}
// less useful due to #2781
- @deprecated("Use From => To instead")
+ @deprecated("Use From => To instead", "2.9.0")
sealed abstract class <%<[-From, +To] extends (From => To) with Serializable
object <%< {
implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
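
Predef's error, exit and format now forward to the scala.sys package and carry deprecation versions; calling sys (or the format method on the string itself) directly avoids the warning. A small sketch with hypothetical wrapper names:

    object SysExample {
      def fail(msg: String): Nothing = sys.error(msg)     // replaces Predef.error(msg)
      def quit(status: Int): Nothing = sys.exit(status)   // replaces Predef.exit(status)
      def fmt(x: Double): String     = "%.2f".format(x)   // replaces Predef.format("%.2f", x)
    }
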
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index dfa04bbbe7..1dbf46b4c2 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -42,7 +42,7 @@ trait Product extends Equals {
def next() = { val result = productElement(c); c += 1; result }
}
- @deprecated("use productIterator instead")
+ @deprecated("use productIterator instead", "2.8.0")
def productElements: Iterator[Any] = productIterator
/** A string used in the `toString` methods of derived classes.
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index afac96fe97..dd6ac0cfd2 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -30,7 +30,7 @@ case class Tuple2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, D
*/
def swap: Tuple2[T2,T1] = Tuple2(_2, _1)
- @deprecated("Use `zipped` instead.")
+ @deprecated("Use `zipped` instead.", "2.9.0")
def zip[Repr1, El1, El2, To](implicit w1: T1 => TLike[El1, Repr1],
w2: T2 => Iterable[El2],
cbf1: CBF[Repr1, (El1, El2), To]): To = {
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 6458fa5042..dfa0c962a2 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -26,7 +26,7 @@ case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")"
- @deprecated("Use `zipped` instead.")
+ @deprecated("Use `zipped` instead.", "2.9.0")
def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
w2: T2 => Iterable[El2],
w3: T3 => Iterable[El3],
diff --git a/src/library/scala/annotation/serializable.scala b/src/library/scala/annotation/serializable.scala
index 9ae31c6462..5a0d1261d6 100644
--- a/src/library/scala/annotation/serializable.scala
+++ b/src/library/scala/annotation/serializable.scala
@@ -11,5 +11,5 @@ package scala.annotation
/**
* An annotation that designates the class to which it is applied as serializable
*/
-@deprecated("instead of `@serializable class C`, use `class C extends Serializable`")
+@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
class serializable extends annotation.StaticAnnotation
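
A migration sketch following the @serializable deprecation message (the class name is illustrative):

    // Before (deprecated):  @serializable class Cache(val entries: Map[String, String])
    // After:
    class Cache(val entries: Map[String, String]) extends Serializable
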
diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala
index 3d0b8af2f2..dc634c67d3 100644
--- a/src/library/scala/collection/CustomParallelizable.scala
+++ b/src/library/scala/collection/CustomParallelizable.scala
@@ -11,8 +11,7 @@ package scala.collection
import parallel.Combiner
trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Parallelizable[A, ParRepr] {
- self: TraversableOnce[A] =>
-
override def par: ParRepr
override protected[this] def parCombiner: Combiner[A, ParRepr] = throw new UnsupportedOperationException("")
}
+
diff --git a/src/library/scala/collection/GenIterable.scala b/src/library/scala/collection/GenIterable.scala
new file mode 100644
index 0000000000..8d735dd86d
--- /dev/null
+++ b/src/library/scala/collection/GenIterable.scala
@@ -0,0 +1,36 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+/** A trait for all iterable collections which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenIterable[+A]
+extends GenIterableLike[A, GenIterable[A]]
+ with GenTraversable[A]
+ with GenericTraversableTemplate[A, GenIterable]
+{
+ def seq: Iterable[A]
+ override def companion: GenericCompanion[GenIterable] = GenIterable
+}
+
+
+object GenIterable extends TraversableFactory[GenIterable] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Iterable.newBuilder
+}
+
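
GenIterable is the common parent of the sequential and parallel branches, so a method written against it can accept either kind of collection. A sketch, assuming the finished 2.9 wiring in which Iterable and the parallel collections both extend GenIterable:

    import scala.collection.GenIterable

    // One implementation serves List(...) as well as List(...).par.
    def totalLength(docs: GenIterable[String]): Int =
      docs.map(_.length).sum

    // totalLength(List("a", "bb"))        // 3
    // totalLength(List("a", "bb").par)    // 3
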
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
new file mode 100644
index 0000000000..d7b4b2ee5e
--- /dev/null
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -0,0 +1,143 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+import generic.{ CanBuildFrom => CBF, _ }
+
+/** A template trait for all iterable collections which may possibly
+ * have their operations implemented in parallel.
+ *
+ * This trait contains abstract methods and methods that can be implemented
+ * directly in terms of other methods.
+ *
+ * @define Coll GenIterable
+ * @define coll general iterable collection
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ * @define zipthatinfo the class of the returned collection. Where possible, `That` is
+ * the same class as the current collection class `Repr`, but this
+ * depends on the element type `(A1, B)` being admissible for that class,
+ *          which means that an implicit instance of type `CanBuildFrom[Repr, (A1, B), That]`
+ *          is found.
+ * @define zipbfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `(A1, B)`.
+ * @define iterableInfo
+ * This is a base trait for all Scala collections that define an `iterator`
+ * method to step through one-by-one the collection's elements.
+ */
+private[collection] trait GenIterableLike[+A, +Repr] extends GenTraversableLike[A, Repr] {
+
+ def iterator: Iterator[A]
+
+ /** Checks if the other iterable collection contains the same elements in the same order as this $coll.
+ *
+ * $orderDependent
+ * $willNotTerminateInf
+ *
+ * @param that the collection to compare with.
+ * @tparam B the type of the elements of collection `that`.
+ * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
+ *
+ * @usecase def sameElements(that: GenIterable[A]): Boolean
+ *
+ * @param that the collection to compare with.
+ * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
+ */
+ def sameElements[A1 >: A](that: GenIterable[A1]): Boolean
+
+ /** Returns a $coll formed from this $coll and another iterable collection
+ * by combining corresponding elements in pairs.
+ * If one of the two collections is longer than the other, its remaining elements are ignored.
+ *
+ * $orderDependent
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @tparam A1 the type of the first half of the returned pairs (this is always a supertype
+ * of the collection's element type `A`).
+ * @tparam B the type of the second half of the returned pairs
+ * @tparam That $zipthatinfo
+ * @param bf $zipbfinfo
+ * @return a new collection of type `That` containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the minimum of the lengths of this $coll and `that`.
+ *
+ * @usecase def zip[B](that: GenIterable[B]): $Coll[(A, B)]
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the minimum of the lengths of this $coll and `that`.
+ */
+ def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CBF[Repr, (A1, B), That]): That
+
+ /** Zips this $coll with its indices.
+ *
+ * $orderDependent
+ *
+ * @tparam A1 the type of the first half of the returned pairs (this is always a supertype
+ * of the collection's element type `A`).
+ * @tparam That the class of the returned collection. Where possible, `That` is
+ * the same class as the current collection class `Repr`, but this
+ * depends on the element type `(A1, Int)` being admissible for that class,
+ *          which means that an implicit instance of type `CanBuildFrom[Repr, (A1, Int), That]`
+ *          is found.
+ *  @param bf      an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `(A1, Int)`.
+ * @return A new collection of type `That` containing pairs consisting of all elements of this
+ * $coll paired with their index. Indices start at `0`.
+ *
+ * @usecase def zipWithIndex: $Coll[(A, Int)]
+ *
+ * @return A new $coll containing pairs consisting of all elements of this
+ * $coll paired with their index. Indices start at `0`.
+ * @example
+ * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
+ *
+ */
+ def zipWithIndex[A1 >: A, That](implicit bf: CBF[Repr, (A1, Int), That]): That
+
+ /** Returns a $coll formed from this $coll and another iterable collection
+ * by combining corresponding elements in pairs.
+ * If one of the two collections is shorter than the other,
+ * placeholder elements are used to extend the shorter collection to the length of the longer.
+ *
+ * $orderDependent
+ *
+ * @param that the iterable providing the second half of each result pair
+ * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
+ * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
+ * @return a new collection of type `That` containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the maximum of the lengths of this $coll and `that`.
+ * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
+ * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
+ *
+ * @usecase def zipAll[B](that: Iterable[B], thisElem: A, thatElem: B): $Coll[(A, B)]
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
+ * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the maximum of the lengths of this $coll and `that`.
+ * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
+ * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
+ */
+ def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CBF[Repr, (A1, B), That]): That
+
+ def isEmpty = iterator.isEmpty
+
+ def head = iterator.next
+}
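
The zip family specified above has the same contracts on the existing sequential collections; a few concrete results as a reference:

    val xs = List(1, 2, 3)
    xs zip List("a", "b")            // List((1,"a"), (2,"b")) -- stops at the shorter collection
    xs.zipWithIndex                  // List((1,0), (2,1), (3,2))
    xs.zipAll(List("a"), 0, "?")     // List((1,"a"), (2,"?"), (3,"?"))
    xs sameElements List(1, 2, 3)    // true
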
diff --git a/src/library/scala/collection/GenIterableView.scala b/src/library/scala/collection/GenIterableView.scala
new file mode 100644
index 0000000000..2ae964bce3
--- /dev/null
+++ b/src/library/scala/collection/GenIterableView.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+
+trait GenIterableView[+A, +Coll] extends GenIterableViewLike[A, Coll, GenIterableView[A, Coll]] { }
+
+
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
new file mode 100644
index 0000000000..c3f0adc310
--- /dev/null
+++ b/src/library/scala/collection/GenIterableViewLike.scala
@@ -0,0 +1,83 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+import generic._
+import TraversableView.NoBuilder
+
+
+
+private[collection] trait GenIterableViewLike[+A,
+ +Coll,
+ +This <: GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This]]
+extends GenIterable[A] with GenIterableLike[A, This] with GenTraversableView[A, Coll] with GenTraversableViewLike[A, Coll, This] {
+self =>
+
+ trait Transformed[+B] extends GenIterableView[B, Coll] with super.Transformed[B] {
+ def iterator: Iterator[B]
+ override def foreach[U](f: B => U): Unit = iterator foreach f
+ override def toString = viewToString
+ }
+
+ trait EmptyView extends Transformed[Nothing] with super.EmptyView {
+ final def iterator: Iterator[Nothing] = Iterator.empty
+ }
+
+ trait Forced[B] extends super.Forced[B] with Transformed[B] {
+ def iterator = forced.iterator
+ }
+
+ trait Sliced extends super.Sliced with Transformed[A] {
+ def iterator: Iterator[A] = self.iterator.slice(from, until)
+ }
+
+ trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
+ def iterator = self.iterator map mapping
+ }
+
+ trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
+ def iterator: Iterator[B] = self.iterator flatMap mapping
+ }
+
+ trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
+ def iterator = self.iterator ++ rest
+ }
+
+ trait Filtered extends super.Filtered with Transformed[A] {
+ def iterator = self.iterator filter pred
+ }
+
+ trait TakenWhile extends super.TakenWhile with Transformed[A] {
+ def iterator = self.iterator takeWhile pred
+ }
+
+ trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
+ def iterator = self.iterator dropWhile pred
+ }
+
+ trait Zipped[B] extends Transformed[(A, B)] {
+ protected[this] val other: GenIterable[B]
+ def iterator: Iterator[(A, B)] = self.iterator zip other.iterator
+ final override protected[this] def viewIdentifier = "Z"
+ }
+
+ trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] {
+ protected[this] val other: GenIterable[B]
+ protected[this] val thisElem: A1
+ protected[this] val thatElem: B
+ final override protected[this] def viewIdentifier = "Z"
+ def iterator: Iterator[(A1, B)] =
+ self.iterator.zipAll(other.iterator, thisElem, thatElem)
+ }
+
+}
+
+
diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala
new file mode 100644
index 0000000000..7e7b6c7fb0
--- /dev/null
+++ b/src/library/scala/collection/GenMap.scala
@@ -0,0 +1,36 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+import generic._
+
+
+/** A trait for all maps which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenMap[A, +B]
+extends GenMapLike[A, B, GenMap[A, B]]
+ with GenIterable[(A, B)]
+{
+ def seq: Map[A, B]
+}
+
+
+object GenMap extends MapFactory[GenMap] {
+ def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty
+
+ /** $mapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
+}
+
+
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
new file mode 100644
index 0000000000..9ae388afb4
--- /dev/null
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -0,0 +1,64 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+/** A trait for all maps upon which operations may be
+ * implemented in parallel.
+ *
+ * @define Coll GenMap
+ * @define coll general map
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ * @define mapNote
+ *
+ * A map is a collection of bindings from keys to values, where there are
+ * no duplicate keys.
+ */
+private[collection] trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals with Parallelizable[(A, B), parallel.ParMap[A, B]] {
+ def default(key: A): B
+ def get(key: A): Option[B]
+ def apply(key: A): B
+ def seq: Map[A, B]
+ def +[B1 >: B](kv: (A, B1)): GenMap[A, B1]
+ def - (key: A): Repr
+
+ // This hash code must be symmetric in the contents but ought not
+ // collide trivially.
+ override def hashCode() = util.MurmurHash.symmetricHash(seq, Map.hashSeed)
+
+ /** Compares two maps structurally; i.e. checks if all mappings
+ * contained in this map are also contained in the other map,
+ * and vice versa.
+ *
+ * @param that the other map
+ * @return `true` if both maps contain exactly the
+ * same mappings, `false` otherwise.
+ */
+ override def equals(that: Any): Boolean = that match {
+ case that: GenMap[b, _] =>
+ (this eq that) ||
+ (that canEqual this) &&
+ (this.size == that.size) && {
+ try {
+ this forall {
+ case (k, v) => that.get(k.asInstanceOf[b]) match {
+ case Some(`v`) =>
+ true
+ case _ => false
+ }
+ }
+ } catch {
+ case ex: ClassCastException =>
+ println("class cast "); false
+ }}
+ case _ =>
+ false
+ }
+}
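
The equals and hashCode contract above makes map comparison purely structural; for example:

    val m1 = Map("a" -> 1, "b" -> 2)
    val m2 = Map("b" -> 2, "a" -> 1)
    m1 == m2                    // true  -- same bindings, insertion order irrelevant
    m1.hashCode == m2.hashCode  // true  -- the hash is symmetric in the entries
    m1 == Map("a" -> 1)         // false -- sizes differ
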
diff --git a/src/library/scala/collection/GenSeq.scala b/src/library/scala/collection/GenSeq.scala
new file mode 100644
index 0000000000..ac5f953e88
--- /dev/null
+++ b/src/library/scala/collection/GenSeq.scala
@@ -0,0 +1,36 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+/** A trait for all sequences which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenSeq[+A]
+extends GenSeqLike[A, GenSeq[A]]
+ with GenIterable[A]
+ with Equals
+ with GenericTraversableTemplate[A, GenSeq]
+{
+ def seq: Seq[A]
+ override def companion: GenericCompanion[GenSeq] = GenSeq
+}
+
+
+object GenSeq extends TraversableFactory[GenSeq] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Seq.newBuilder
+}
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
new file mode 100644
index 0000000000..1502853d8e
--- /dev/null
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -0,0 +1,410 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+import generic._
+
+/** A template trait for all sequences which may be traversed
+ * in parallel.
+ *
+ * @define Coll GenSeq
+ * @define coll general sequence
+ * @define mayNotTerminateInf
+ *
+ * Note: may not terminate for infinite-sized collections.
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ * @define seqInfo
+ * Sequences are special cases of iterable collections of class `Iterable`.
+ * Unlike iterables, sequences always have a defined order of elements.
+ */
+private[collection] trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Parallelizable[A, parallel.ParSeq[A]] {
+
+ /** Selects an element by its index in the $coll.
+ *
+ * @param idx The index to select.
+ * @return the element of this $coll at index `idx`, where `0` indicates the first element.
+ * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
+ */
+ def apply(idx: Int): A
+
+ /** The length of the $coll.
+ *
+ * $willNotTerminateInf
+ *
+ * Note: `xs.length` and `xs.size` yield the same result.
+ *
+ * @return the number of elements in this $coll.
+ */
+ def length: Int
+
+  /** Tests whether this $coll contains a given index.
+ *
+ * The implementations of methods `apply` and `isDefinedAt` turn a `Seq[A]` into
+ * a `PartialFunction[Int, A]`.
+ *
+ * @param idx the index to test
+ * @return `true` if this $coll contains an element at position `idx`, `false` otherwise.
+ */
+ def isDefinedAt(idx: Int): Boolean = (idx >= 0) && (idx < length)
+
+ /** Computes length of longest segment whose elements all satisfy some predicate.
+ *
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate used to test elements.
+ * @param from the index where the search starts.
+ * @return the length of the longest segment of this $coll starting from index `from`
+ * such that every element of the segment satisfies the predicate `p`.
+ */
+ def segmentLength(p: A => Boolean, from: Int): Int
+
+ /** Returns the length of the longest prefix whose elements all satisfy some predicate.
+ *
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate used to test elements.
+ * @return the length of the longest prefix of this $coll
+ * such that every element of the segment satisfies the predicate `p`.
+ */
+ def prefixLength(p: A => Boolean): Int = segmentLength(p, 0)
+
+ /** Finds index of the first element satisfying some predicate after or at some start index.
+ *
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate used to test elements.
+ * @param from the start index
+ * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`,
+ * or `-1`, if none exists.
+ */
+ def indexWhere(p: A => Boolean, from: Int): Int
+
+ /** Finds index of first element satisfying some predicate.
+ *
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate used to test elements.
+ * @return the index of the first element of this $coll that satisfies the predicate `p`,
+ * or `-1`, if none exists.
+ */
+ def indexWhere(p: A => Boolean): Int = indexWhere(p, 0)
+
+ /** Finds index of first occurrence of some value in this $coll.
+ *
+ * $mayNotTerminateInf
+ *
+ * @param elem the element value to search for.
+ * @tparam B the type of the element `elem`.
+ * @return the index of the first element of this $coll that is equal (wrt `==`)
+ * to `elem`, or `-1`, if none exists.
+ *
+ * @usecase def indexOf(elem: A): Int
+ */
+ def indexOf[B >: A](elem: B): Int = indexOf(elem, 0)
+
+ /** Finds index of first occurrence of some value in this $coll after or at some start index.
+ *
+ * $mayNotTerminateInf
+ *
+ * @param elem the element value to search for.
+ * @tparam B the type of the element `elem`.
+ * @param from the start index
+ * @return the index `>= from` of the first element of this $coll that is equal (wrt `==`)
+ * to `elem`, or `-1`, if none exists.
+ *
+ * @usecase def indexOf(elem: A, from: Int): Int
+ */
+ def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem ==, from)
+
+ /** Finds index of last occurrence of some value in this $coll.
+ *
+ * $willNotTerminateInf
+ *
+ * @param elem the element value to search for.
+ * @tparam B the type of the element `elem`.
+ * @return the index of the last element of this $coll that is equal (wrt `==`)
+ * to `elem`, or `-1`, if none exists.
+ *
+ * @usecase def lastIndexOf(elem: A): Int
+ */
+ def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem ==)
+
+ /** Finds index of last occurrence of some value in this $coll before or at a given end index.
+ *
+ * @param elem the element value to search for.
+ * @param end the end index.
+ * @tparam B the type of the element `elem`.
+ * @return the index `<= end` of the last element of this $coll that is equal (wrt `==`)
+ * to `elem`, or `-1`, if none exists.
+ *
+ * @usecase def lastIndexOf(elem: A, end: Int): Int
+ */
+ def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem ==, end)
+
+ /** Finds index of last element satisfying some predicate.
+ *
+ * $willNotTerminateInf
+ *
+ * @param p the predicate used to test elements.
+ * @return the index of the last element of this $coll that satisfies the predicate `p`,
+ * or `-1`, if none exists.
+ */
+ def lastIndexWhere(p: A => Boolean): Int = lastIndexWhere(p, length - 1)
+
+ /** Finds index of last element satisfying some predicate before or at given end index.
+ *
+ * @param p the predicate used to test elements.
+ * @return the index `<= end` of the last element of this $coll that satisfies the predicate `p`,
+ * or `-1`, if none exists.
+ */
+ def lastIndexWhere(p: A => Boolean, end: Int): Int
+
+  /** Returns a new $coll with elements in reversed order.
+ *
+ * $willNotTerminateInf
+ *
+ * @return A new $coll with all elements of this $coll in reversed order.
+ */
+ def reverse: Repr
+
+ /**
+ * Builds a new collection by applying a function to all elements of this $coll and
+ * collecting the results in reversed order.
+ *
+ * $willNotTerminateInf
+ *
+ * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results in reversed order.
+ *
+ * @usecase def reverseMap[B](f: A => B): $Coll[B]
+ *
+ * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results in reversed order.
+ */
+ def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Tests whether this $coll starts with the given sequence.
+ *
+ * @param that the sequence to test
+ * @return `true` if this collection has `that` as a prefix, `false` otherwise.
+ */
+ def startsWith[B](that: GenSeq[B]): Boolean = startsWith(that, 0)
+
+ /** Tests whether this $coll contains the given sequence at a given index.
+ *
+   *  If both the receiver object, `this`, and
+   *  the argument, `that`, are infinite sequences,
+   *  this method may not terminate.
+ *
+ * @param that the sequence to test
+ * @param offset the index where the sequence is searched.
+ * @return `true` if the sequence `that` is contained in this $coll at index `offset`,
+ * otherwise `false`.
+ */
+ def startsWith[B](that: GenSeq[B], offset: Int): Boolean
+
+ /** Tests whether this $coll ends with the given sequence.
+ * $willNotTerminateInf
+ * @param that the sequence to test
+ * @return `true` if this $coll has `that` as a suffix, `false` otherwise.
+ */
+ def endsWith[B](that: GenSeq[B]): Boolean
+
+ /** Produces a new $coll where a slice of elements in this $coll is replaced by another sequence.
+ *
+ * @param from the index of the first replaced element
+ * @param patch the replacement sequence
+ * @param replaced the number of elements to drop in the original $coll
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new $coll consisting of all elements of this $coll
+ * except that `replaced` elements starting from `from` are replaced
+ * by `patch`.
+ * @usecase def patch(from: Int, that: GenSeq[A], replaced: Int): $Coll[A]
+ * @return a new $coll consisting of all elements of this $coll
+ * except that `replaced` elements starting from `from` are replaced
+ * by `patch`.
+ */
+ def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** A copy of this $coll with one single replaced element.
+ * @param index the position of the replacement
+ * @param elem the replacing element
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+   *  @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`.
+ * @usecase def updated(index: Int, elem: A): $Coll[A]
+ * @return a copy of this $coll with the element at position `index` replaced by `elem`.
+ */
+ def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Prepends an element to this $coll
+ * @param elem the prepended element
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` consisting of `elem` followed
+ * by all elements of this $coll.
+ * @usecase def +:(elem: A): $Coll[A]
+ * @return a new $coll consisting of `elem` followed
+ * by all elements of this $coll.
+ */
+ def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Appends an element to this $coll
+ * $willNotTerminateInf
+ * @param elem the appended element
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` consisting of
+ * all elements of this $coll followed by `elem`.
+ * @usecase def :+(elem: A): $Coll[A]
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by `elem`.
+ */
+ def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Appends an element value to this $coll until a given target length is reached.
+ * @param len the target length
+ * @param elem the padding value
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` consisting of
+ * all elements of this $coll followed by the minimal number of occurrences of `elem` so
+ * that the resulting collection has a length of at least `len`.
+ * @usecase def padTo(len: Int, elem: A): $Coll[A]
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by the minimal number of occurrences of `elem` so
+ * that the resulting $coll has a length of at least `len`.
+ */
+ def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Tests whether every element of this $coll relates to the
+ * corresponding element of another sequence by satisfying a test predicate.
+ *
+ * @param that the other sequence
+ * @param p the test predicate, which relates elements from both sequences
+ * @tparam B the type of the elements of `that`
+ * @return `true` if both sequences have the same length and
+ * `p(x, y)` is `true` for all corresponding elements `x` of this $coll
+ * and `y` of `that`, otherwise `false`.
+ */
+ def corresponds[B](that: GenSeq[B])(p: (A, B) => Boolean): Boolean
+
+ def toSeq: GenSeq[A]
+
+ /** Produces a new sequence which contains all elements of this $coll and also all elements of
+ * a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
+ * $willNotTerminateInf
+ *
+ * Another way to express this
+   *  is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
+   *  `union` is hence a counterpart of `diff` and `intersect`, which also work on multi-sets.
+ *
+ * $willNotTerminateInf
+ *
+ * @param that the sequence to add.
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements of this $coll
+ * followed by all elements of `that`.
+ * @usecase def union(that: GenSeq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
+ */
+ def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = this ++ that
+
+ /** Computes the multiset difference between this $coll and another sequence.
+ * $willNotTerminateInf
+ *
+ * @param that the sequence of elements to remove
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements of this $coll
+   *          except some of the occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
+ * @usecase def diff(that: GenSeq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+   *          except some of the occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
+ */
+ def diff[B >: A](that: GenSeq[B]): Repr
+
+ /** Computes the multiset intersection between this $coll and another sequence.
+ * $mayNotTerminateInf
+ *
+ * @param that the sequence of elements to intersect with.
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ * @usecase def intersect(that: GenSeq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ */
+ def intersect[B >: A](that: GenSeq[B]): Repr
+
+ /** Builds a new $coll from this $coll without any duplicate elements.
+ * $willNotTerminateInf
+ *
+ * @return A new $coll which contains the first occurrence of every element of this $coll.
+ */
+ def distinct: Repr
+
+ /** Hashcodes for $Coll produce a value from the hashcodes of all the
+ * elements of the $coll.
+ */
+ override def hashCode() = {
+ val h = new util.MurmurHash[A](Seq.hashSeed)
+ seq.foreach(h)
+ h.hash
+ }
+
+ /** The equals method for arbitrary sequences. Compares this sequence to
+ * some other object.
+ * @param that The object to compare the sequence to
+ * @return `true` if `that` is a sequence that has the same elements as
+ * this sequence in the same order, `false` otherwise
+ */
+ override def equals(that: Any): Boolean = that match {
+ case that: GenSeq[_] => (that canEqual this) && (this sameElements that)
+ case _ => false
+ }
+
+}
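
Most of the operations specified above already exist on the sequential Seq types; some concrete results on a List for reference:

    val xs = List(1, 2, 2, 3)
    xs.indexWhere(_ > 1)          // 1
    xs.lastIndexOf(2)             // 2
    xs.patch(1, List(9, 9), 2)    // List(1, 9, 9, 3)
    xs.updated(0, 0)              // List(0, 2, 2, 3)
    (0 +: xs) :+ 4                // List(0, 1, 2, 2, 3, 4)
    xs diff List(2)               // List(1, 2, 3)
    xs intersect List(2, 3, 4)    // List(2, 3)
    xs.distinct                   // List(1, 2, 3)
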
diff --git a/src/library/scala/collection/GenSeqView.scala b/src/library/scala/collection/GenSeqView.scala
new file mode 100644
index 0000000000..c18c656b55
--- /dev/null
+++ b/src/library/scala/collection/GenSeqView.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+
+trait GenSeqView[+A, +Coll] extends GenSeqViewLike[A, Coll, GenSeqView[A, Coll]] { }
+
+
diff --git a/src/library/scala/collection/GenSeqViewLike.scala b/src/library/scala/collection/GenSeqViewLike.scala
new file mode 100644
index 0000000000..74d558342f
--- /dev/null
+++ b/src/library/scala/collection/GenSeqViewLike.scala
@@ -0,0 +1,164 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+
+private[collection] trait GenSeqViewLike[+A,
+ +Coll,
+ +This <: GenSeqView[A, Coll] with GenSeqViewLike[A, Coll, This]]
+extends GenSeq[A] with GenSeqLike[A, This] with GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This] {
+self =>
+
+ trait Transformed[+B] extends GenSeqView[B, Coll] with super.Transformed[B] {
+ def length: Int
+ def apply(idx: Int): B
+ override def toString = viewToString
+ }
+
+ trait EmptyView extends Transformed[Nothing] with super.EmptyView {
+ final override def length = 0
+ final override def apply(n: Int) = Nil(n)
+ }
+
+ trait Forced[B] extends super.Forced[B] with Transformed[B] {
+ def length = forced.length
+ def apply(idx: Int) = forced.apply(idx)
+ }
+
+ trait Sliced extends super.Sliced with Transformed[A] {
+ def length = iterator.size
+ def apply(idx: Int): A =
+ if (idx + from < until) self.apply(idx + from)
+ else throw new IndexOutOfBoundsException(idx.toString)
+
+ override def foreach[U](f: A => U) = iterator foreach f
+ override def iterator: Iterator[A] = self.iterator drop from take endpoints.width
+ }
+
+ trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
+ def length = self.length
+ def apply(idx: Int): B = mapping(self(idx))
+ }
+
+ trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
+ protected[this] lazy val index = {
+ val index = new Array[Int](self.length + 1)
+ index(0) = 0
+ for (i <- 0 until self.length) // note that if the mapping returns a list, performance is bad, bad
+ index(i + 1) = index(i) + mapping(self(i)).seq.size
+ index
+ }
+ protected[this] def findRow(idx: Int, lo: Int, hi: Int): Int = {
+ val mid = (lo + hi) / 2
+ if (idx < index(mid)) findRow(idx, lo, mid - 1)
+ else if (idx >= index(mid + 1)) findRow(idx, mid + 1, hi)
+ else mid
+ }
+ def length = index(self.length)
+ def apply(idx: Int) = {
+ val row = findRow(idx, 0, self.length - 1)
+ mapping(self(row)).seq.toSeq(idx - index(row))
+ }
+ }
+
+ trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
+ protected[this] lazy val restSeq = rest.toSeq
+ def length = self.length + restSeq.length
+ def apply(idx: Int) =
+ if (idx < self.length) self(idx) else restSeq(idx - self.length)
+ }
+
+ trait Filtered extends super.Filtered with Transformed[A] {
+ protected[this] lazy val index = {
+ var len = 0
+ val arr = new Array[Int](self.length)
+ for (i <- 0 until self.length)
+ if (pred(self(i))) {
+ arr(len) = i
+ len += 1
+ }
+ arr take len
+ }
+ def length = index.length
+ def apply(idx: Int) = self(index(idx))
+ }
+
+ trait TakenWhile extends super.TakenWhile with Transformed[A] {
+ protected[this] lazy val len = self prefixLength pred
+ def length = len
+ def apply(idx: Int) =
+ if (idx < len) self(idx)
+ else throw new IndexOutOfBoundsException(idx.toString)
+ }
+
+ trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
+ protected[this] lazy val start = self prefixLength pred
+ def length = self.length - start
+ def apply(idx: Int) =
+ if (idx >= 0) self(idx + start)
+ else throw new IndexOutOfBoundsException(idx.toString)
+ }
+
+ trait Zipped[B] extends super.Zipped[B] with Transformed[(A, B)] {
+ protected[this] lazy val thatSeq = other.seq.toSeq
+ /* Have to be careful here - other may be an infinite sequence. */
+ def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length
+ def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx))
+ }
+
+ trait ZippedAll[A1 >: A, B] extends super.ZippedAll[A1, B] with Transformed[(A1, B)] {
+ protected[this] lazy val thatSeq = other.seq.toSeq
+ def length: Int = self.length max thatSeq.length
+ def apply(idx: Int) =
+ (if (idx < self.length) self.apply(idx) else thisElem,
+ if (idx < thatSeq.length) thatSeq.apply(idx) else thatElem)
+ }
+
+ trait Reversed extends Transformed[A] {
+ override def iterator: Iterator[A] = createReversedIterator
+ def length: Int = self.length
+ def apply(idx: Int): A = self.apply(length - 1 - idx)
+ final override protected[this] def viewIdentifier = "R"
+
+ private def createReversedIterator = {
+ var lst = List[A]()
+ for (elem <- self) lst ::= elem
+ lst.iterator
+ }
+ }
+
+ trait Patched[B >: A] extends Transformed[B] {
+ protected[this] val from: Int
+ protected[this] val patch: GenSeq[B]
+ protected[this] val replaced: Int
+ private lazy val plen = patch.length
+ override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced)
+ def length: Int = self.length + plen - replaced
+ def apply(idx: Int): B =
+ if (idx < from) self.apply(idx)
+ else if (idx < from + plen) patch.apply(idx - from)
+ else self.apply(idx - plen + replaced)
+ final override protected[this] def viewIdentifier = "P"
+ }
+
+ trait Prepended[B >: A] extends Transformed[B] {
+ protected[this] val fst: B
+ override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
+ def length: Int = 1 + self.length
+ def apply(idx: Int): B =
+ if (idx == 0) fst
+ else self.apply(idx - 1)
+ final override protected[this] def viewIdentifier = "A"
+ }
+
+}
+
+
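
The Transformed wrappers above are what keep sequence views lazy; a small sketch of the observable effect on an ordinary SeqView:

    val v = (1 to 6).view.map(_ * 10).filter(_ > 20)  // nothing evaluated yet
    v.take(2).force                                   // evaluates only 30 and 40
    List(1, 2, 3).view.patch(1, List(9), 1).force     // elements 1, 9, 3
    (1 to 3).view.reverse.force                       // elements 3, 2, 1
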
diff --git a/src/library/scala/collection/GenSet.scala b/src/library/scala/collection/GenSet.scala
new file mode 100644
index 0000000000..ec066844b4
--- /dev/null
+++ b/src/library/scala/collection/GenSet.scala
@@ -0,0 +1,37 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.collection
+
+
+import generic._
+
+
+/** A trait for sets which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenSet[A]
+extends GenSetLike[A, GenSet[A]]
+ with GenIterable[A]
+ with GenericSetTemplate[A, GenSet]
+{
+ override def companion: GenericCompanion[GenSet] = GenSet
+ def seq: Set[A]
+}
+
+
+object GenSet extends TraversableFactory[GenSet] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Set.newBuilder
+}
+
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
new file mode 100644
index 0000000000..d45810d2e7
--- /dev/null
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -0,0 +1,131 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+/** A template trait for sets which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @define Coll GenSet
+ * @define coll general set
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ * @define setNote
+ *
+ * A set is a collection that contains no duplicate elements.
+ */
+private[collection] trait GenSetLike[A, +Repr]
+ extends GenIterableLike[A, Repr]
+ with (A => Boolean)
+ with Equals
+ with Parallelizable[A, parallel.ParSet[A]] {
+
+ def iterator: Iterator[A]
+ def contains(elem: A): Boolean
+ def +(elem: A): Repr
+ def -(elem: A): Repr
+
+ def seq: Set[A]
+
+ /** Tests if some element is contained in this set.
+ *
+ * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
+ * @param elem the element to test for membership.
+ * @return `true` if `elem` is contained in this set, `false` otherwise.
+ */
+ def apply(elem: A): Boolean = this contains elem
+
+ /** Computes the intersection between this set and another set.
+ *
+ * @param that the set to intersect with.
+ * @return a new set consisting of all elements that are both in this
+ * set and in the given set `that`.
+ */
+ def intersect(that: GenSet[A]): Repr = this filter that
+
+ /** Computes the intersection between this set and another set.
+ *
+ * '''Note:''' Same as `intersect`.
+ * @param that the set to intersect with.
+ * @return a new set consisting of all elements that are both in this
+ * set and in the given set `that`.
+ */
+ def &(that: GenSet[A]): Repr = this intersect that
+
+  /** Computes the union between this set and another set.
+ *
+ * @param that the set to form the union with.
+ * @return a new set consisting of all elements that are in this
+ * set or in the given set `that`.
+ */
+ def union(that: GenSet[A]): Repr
+
+ /** Computes the union between this set and another set.
+ *
+ * '''Note:''' Same as `union`.
+ * @param that the set to form the union with.
+ * @return a new set consisting of all elements that are in this
+ * set or in the given set `that`.
+ */
+ def | (that: GenSet[A]): Repr = this union that
+
+ /** Computes the difference of this set and another set.
+ *
+ * @param that the set of elements to exclude.
+ * @return a set containing those elements of this
+ * set that are not also contained in the given set `that`.
+ */
+ def diff(that: GenSet[A]): Repr
+
+ /** The difference of this set and another set.
+ *
+ * '''Note:''' Same as `diff`.
+ * @param that the set of elements to exclude.
+ * @return a set containing those elements of this
+ * set that are not also contained in the given set `that`.
+ */
+ def &~(that: GenSet[A]): Repr = this diff that
+
+ /** Tests whether this set is a subset of another set.
+ *
+ * @param that the set to test.
+ * @return `true` if this set is a subset of `that`, i.e. if
+ * every element of this set is also an element of `that`.
+ */
+ def subsetOf(that: GenSet[A]): Boolean = this forall that
+
+ /** Compares this set with another object for equality.
+ *
+ * '''Note:''' This operation contains an unchecked cast: if `that`
+ * is a set, it will assume with an unchecked cast
+ * that it has the same element type as this set.
+ * Any subsequent ClassCastException is treated as a `false` result.
+ * @param that the other object
+ * @return `true` if `that` is a set which contains the same elements
+ * as this set.
+ */
+ override def equals(that: Any): Boolean = that match {
+ case that: GenSet[_] =>
+ (this eq that) ||
+ (that canEqual this) &&
+ (this.size == that.size) &&
+ (try this subsetOf that.asInstanceOf[GenSet[A]]
+ catch { case ex: ClassCastException => false })
+ case _ =>
+ false
+ }
+
+ // Careful! Don't write a Set's hashCode like:
+ // override def hashCode() = this map (_.hashCode) sum
+ // Calling map on a set drops duplicates: any hashcode collisions would
+ // then be dropped before they can be added.
+ // Hash should be symmetric in set entries, but without trivial collisions.
+ override def hashCode() = util.MurmurHash.symmetricHash(seq, Set.hashSeed)
+
+}
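
The set algebra specified above, shown on plain immutable sets, including the set-as-predicate behaviour from apply:

    val a = Set(1, 2, 3)
    val b = Set(3, 4)
    a & b                       // Set(3)
    a | b                       // Set(1, 2, 3, 4)
    a &~ b                      // Set(1, 2)
    Set(1, 2) subsetOf a        // true
    a(2)                        // true  -- a set is also a function A => Boolean
    List(1, 2, 2, 5) filter a   // List(1, 2, 2)
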
diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala
new file mode 100644
index 0000000000..fc1f84d2e5
--- /dev/null
+++ b/src/library/scala/collection/GenTraversable.scala
@@ -0,0 +1,39 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+
+
+import generic._
+
+
+/** A trait for all traversable collections which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenTraversable[+A]
+extends GenTraversableLike[A, GenTraversable[A]]
+ with GenTraversableOnce[A]
+ with GenericTraversableTemplate[A, GenTraversable]
+{
+ def seq: Traversable[A]
+ def companion: GenericCompanion[GenTraversable] = GenTraversable
+}
+
+
+object GenTraversable extends TraversableFactory[GenTraversable] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Traversable.newBuilder
+}
+
+
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
new file mode 100644
index 0000000000..44aae3053f
--- /dev/null
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -0,0 +1,333 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+import annotation.migration
+
+
+/** A template trait for all traversable collections upon which operations
+ * may be implemented in parallel.
+ *
+ * @define thatinfo the class of the returned collection. Where possible, `That` is
+ * the same class as the current collection class `Repr`, but this
+ * depends on the element type `B` being admissible for that class,
+ * which means that an implicit instance of type `CanBuildFrom[Repr, B, That]`
+ * is found.
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines
+ *    the result class `That` from the current representation type `Repr` and
+ *    the new element type `B`.
+ * @define orderDependent
+ *
+ * Note: might return different results for different runs, unless the underlying collection type is ordered.
+ * @define orderDependentFold
+ *
+ *    Note: might return different results for different runs, unless the underlying collection type is ordered
+ *    or the operator is associative and commutative.
+ * @define mayNotTerminateInf
+ *
+ * Note: may not terminate for infinite-sized collections.
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
+ *
+ * @define traversableInfo
+ * This is a base trait of all kinds of Scala collections.
+ *
+ * @define Coll GenTraversable
+ * @define coll general collection
+ * @tparam A the collection element type.
+ * @tparam Repr the actual type of the element container.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+private[collection] trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Parallelizable[A, parallel.ParIterable[A]] {
+
+ def repr: Repr
+
+ def size: Int
+
+ def head: A
+
+ /** Tests whether this $coll can be repeatedly traversed.
+ * @return `true`
+ */
+ final def isTraversableAgain = true
+
+ /** Selects all elements except the first.
+ * $orderDependent
+ * @return a $coll consisting of all elements of this $coll
+ * except the first one.
+ * @throws `UnsupportedOperationException` if the $coll is empty.
+ */
+ def tail: Repr = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.tail")
+ drop(1)
+ }
+
+ /** Computes a prefix scan of the elements of the collection.
+ *
+ * Note: The neutral element `z` may be applied more than once.
+ *
+ * @tparam B element type of the resulting collection
+ * @tparam That type of the resulting collection
+ * @param z neutral element for the operator `op`
+ * @param op the associative operator for the scan
+ * @param cbf combiner factory which provides a combiner
+ *
+ * @return a new $coll containing the prefix scan of the elements in this $coll
+ */
+ def scan[B >: A, That](z: B)(op: (B, B) => B)(implicit cbf: CanBuildFrom[Repr, B, That]): That
+
+  /** Produces a collection containing cumulative results of applying the
+ * operator going left to right.
+ *
+ * $willNotTerminateInf
+ * $orderDependent
+ *
+ * @tparam B the type of the elements in the resulting collection
+ * @tparam That the actual type of the resulting collection
+ * @param z the initial value
+ * @param op the binary operator applied to the intermediate result and the element
+ * @param bf $bfinfo
+ * @return collection with intermediate results
+ */
+ def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+  /** Produces a collection containing cumulative results of applying the operator going right to left.
+   *  The head of the collection is the last cumulative result.
+ * $willNotTerminateInf
+ * $orderDependent
+ *
+ * Example:
+ * {{{
+ * List(1, 2, 3, 4).scanRight(0)(_ + _) == List(10, 9, 7, 4, 0)
+ * }}}
+ *
+ * @tparam B the type of the elements in the resulting collection
+ * @tparam That the actual type of the resulting collection
+ * @param z the initial value
+ * @param op the binary operator applied to the intermediate result and the element
+ * @param bf $bfinfo
+ * @return collection with intermediate results
+ */
+ @migration(2, 9,
+ "This scanRight definition has changed in 2.9.\n" +
+ "The previous behavior can be reproduced with scanRight.reverse."
+ )
+ def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Applies a function `f` to all elements of this $coll.
+ *
+ * @param f the function that is applied for its side-effect to every element.
+ * The result of function `f` is discarded.
+ *
+ * @tparam U the type parameter describing the result of function `f`.
+ * This result will always be ignored. Typically `U` is `Unit`,
+ * but this is not necessary.
+ *
+ * @usecase def foreach(f: A => Unit): Unit
+ */
+ def foreach[U](f: A => U): Unit
+
+ /** Builds a new collection by applying a function to all elements of this $coll.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
+ *
+ * @usecase def map[B](f: A => B): $Coll[B]
+ *
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
+ */
+ def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Builds a new collection by applying a partial function to all elements of this $coll
+ * on which the function is defined.
+ *
+ * @param pf the partial function which filters and maps the $coll.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
+ *
+ * @usecase def collect[B](pf: PartialFunction[A, B]): $Coll[B]
+ *
+ * @return a new $coll resulting from applying the given partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
+ */
+ def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Builds a new collection by applying a function to all elements of this $coll
+ * and concatenating the results.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
+ *
+ * @usecase def flatMap[B](f: A => GenTraversableOnce[B]): $Coll[B]
+ *
+ * @return a new $coll resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
+ */
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Concatenates this $coll with the elements of a traversable collection.
+ *
+ * @param that the traversable to append.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements
+ * of this $coll followed by all elements of `that`.
+ *
+ * @usecase def ++[B](that: GenTraversableOnce[B]): $Coll[B]
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
+ */
+ def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Selects all elements of this $coll which satisfy a predicate.
+ *
+ * @param p the predicate used to test elements.
+ * @return a new $coll consisting of all elements of this $coll that satisfy the given
+ * predicate `p`. Their order may not be preserved.
+ */
+ def filter(pred: A => Boolean): Repr
+
+ /** Selects all elements of this $coll which do not satisfy a predicate.
+ *
+ * @param p the predicate used to test elements.
+ * @return a new $coll consisting of all elements of this $coll that do not satisfy the given
+ * predicate `p`. Their order may not be preserved.
+ */
+ def filterNot(pred: A => Boolean): Repr
+
+ /** Partitions this $coll in two ${coll}s according to a predicate.
+ *
+ * @param p the predicate on which to partition.
+ * @return a pair of ${coll}s: the first $coll consists of all elements that
+ * satisfy the predicate `p` and the second $coll consists of all elements
+ * that don't. The relative order of the elements in the resulting ${coll}s
+ * may not be preserved.
+ */
+ def partition(pred: A => Boolean): (Repr, Repr)
+
+ /** Partitions this $coll into a map of ${coll}s according to some discriminator function.
+ *
+ * Note: this method is not re-implemented by views. This means
+ * when applied to a view it will always force the view and
+ * return a new $coll.
+ *
+ * @param f the discriminator function.
+ * @tparam K the type of keys returned by the discriminator function.
+ * @return A map from keys to ${coll}s such that the following invariant holds:
+ * {{{
+   *     (xs groupBy f)(k) = xs filter (x => f(x) == k)
+ * }}}
+ * That is, every key `k` is bound to a $coll of those elements `x`
+ * for which `f(x)` equals `k`.
+ *
+ */
+ def groupBy[K](f: A => K): GenMap[K, Repr]
+
+ /** Selects first ''n'' elements.
+ * $orderDependent
+   *  @param n the number of elements to take from this $coll.
+ * @return a $coll consisting only of the first `n` elements of this $coll,
+ * or else the whole $coll, if it has less than `n` elements.
+ */
+ def take(n: Int): Repr
+
+ /** Selects all elements except first ''n'' ones.
+ * $orderDependent
+ * @param n the number of elements to drop from this $coll.
+ * @return a $coll consisting of all elements of this $coll except the first `n` ones, or else the
+ * empty $coll, if this $coll has less than `n` elements.
+ */
+ def drop(n: Int): Repr
+
+ /** Selects an interval of elements. The returned collection is made up
+ * of all elements `x` which satisfy the invariant:
+ * {{{
+ * from <= indexOf(x) < until
+ * }}}
+ * $orderDependent
+ *
+ * @param from the lowest index to include from this $coll.
+ * @param until the highest index to EXCLUDE from this $coll.
+ * @return a $coll containing the elements greater than or equal to
+ * index `from` extending up to (but not including) index `until`
+ * of this $coll.
+ */
+ def slice(unc_from: Int, unc_until: Int): Repr
+
+ /** Splits this $coll into two at a given position.
+ * Note: `c splitAt n` is equivalent to (but possibly more efficient than)
+ * `(c take n, c drop n)`.
+ * $orderDependent
+ *
+ * @param n the position at which to split.
+ * @return a pair of ${coll}s consisting of the first `n`
+ * elements of this $coll, and the other elements.
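+ *
+ * @example An illustrative sketch, assuming a concrete `List`:
+ * {{{
+ * List(1, 2, 3, 4, 5) splitAt 2 // assuming List: (List(1, 2), List(3, 4, 5))
+ * }}}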
+ */
+ def splitAt(n: Int): (Repr, Repr)
+
+ /** Takes longest prefix of elements that satisfy a predicate.
+ * $orderDependent
+ * @param pred the predicate used to test elements.
+ * @return the longest prefix of this $coll whose elements all satisfy
+ * the predicate `pred`.
+ */
+ def takeWhile(pred: A => Boolean): Repr
+
+ /** Splits this $coll into a prefix/suffix pair according to a predicate.
+ *
+ * Note: `c span p` is equivalent to (but possibly more efficient than)
+ * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the
+ * predicate `p` does not cause any side-effects.
+ * $orderDependent
+ *
+ * @param pred the test predicate.
+ * @return a pair consisting of the longest prefix of this $coll whose
+ * elements all satisfy `pred`, and the rest of this $coll.
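+ *
+ * @example An illustrative sketch, assuming a concrete `List`:
+ * {{{
+ * List(1, 2, 3, 1) span (_ < 3) // assuming List: (List(1, 2), List(3, 1))
+ * }}}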
+ */
+ def span(pred: A => Boolean): (Repr, Repr)
+
+ /** Drops longest prefix of elements that satisfy a predicate.
+ * $orderDependent
+ * @param pred the predicate used to test elements.
+ * @return the longest suffix of this $coll whose first element
+ * does not satisfy the predicate `pred`.
+ */
+ def dropWhile(pred: A => Boolean): Repr
+
+ /** Defines the prefix of this object's `toString` representation.
+ *
+ * @return a string with which the result of `toString` applied to
+ * this $coll starts. By default the string prefix is the
+ * simple name of the collection class $coll.
+ */
+ def stringPrefix: String
+
+}
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
new file mode 100644
index 0000000000..053a6d211a
--- /dev/null
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -0,0 +1,510 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+/** A template trait for all traversable-once objects which may be
+ * traversed in parallel.
+ *
+ * Methods in this trait are either abstract or can be implemented in terms
+ * of other methods.
+ *
+ * @define Coll GenTraversableOnce
+ * @define coll collection or iterator
+ * @define possiblyparinfo
+ * This trait may possibly have operations implemented in parallel.
+ * @define undefinedorder
+ * The order in which operations are performed on elements is unspecified and may be nondeterministic.
+ * @define orderDependent
+ *
+ * Note: might return different results for different runs, unless the underlying collection type is ordered.
+ * @define orderDependentFold
+ *
+ * Note: might return different results for different runs, unless the
+ * underlying collection type is ordered or the operator is associative
+ * and commutative.
+ * @define mayNotTerminateInf
+ *
+ * Note: may not terminate for infinite-sized collections.
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+private[collection] trait GenTraversableOnce[+A] {
+
+ def foreach[U](f: A => U): Unit
+
+ def hasDefiniteSize: Boolean
+
+ def seq: TraversableOnce[A]
+
+ /** The size of this $coll.
+ *
+ * $willNotTerminateInf
+ *
+ * @return the number of elements in this $coll.
+ */
+ def size: Int
+
+ /** Tests whether the $coll is empty.
+ *
+ * @return `true` if the $coll contains no elements, `false` otherwise.
+ */
+ def isEmpty: Boolean
+
+ /** Tests whether the $coll is not empty.
+ *
+ * @return `true` if the $coll contains at least one element, `false` otherwise.
+ */
+ def nonEmpty: Boolean
+
+ /** Tests whether this $coll can be repeatedly traversed. Always
+ * true for Traversables and false for Iterators unless overridden.
+ *
+ * @return `true` if it is repeatedly traversable, `false` otherwise.
+ */
+ def isTraversableAgain: Boolean
+
+ /** Reduces the elements of this sequence using the specified associative binary operator.
+ *
+ * $undefinedorder
+ *
+ * Note that this method has a different signature from the `reduceLeft`
+ * and `reduceRight` methods of the trait `Traversable`.
+ * The result of reducing may only be a supertype of this collection's
+ * element type `A`.
+ *
+ * @tparam A1 A type parameter for the binary operator, a supertype of `A`.
+ * @param op A binary operator that must be associative.
+ * @return The result of applying reduce operator `op` between all the elements if the collection is nonempty.
+ * @throws UnsupportedOperationException
+ * if this $coll is empty.
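+ *
+ * @example A minimal sketch, with `List` standing in for the abstract $coll:
+ * {{{
+ * List(1, 2, 3, 4).reduce(_ + _) // assuming List: 10
+ * }}}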
+ */
+ def reduce[A1 >: A](op: (A1, A1) => A1): A1
+
+ /** Optionally reduces the elements of this sequence using the specified associative binary operator.
+ *
+ * $undefinedorder
+ *
+ * Note that this method has a different signature from the `reduceLeftOption`
+ * and `reduceRightOption` methods of the trait `Traversable`.
+ * The result of reducing may only be a supertype of this collection's
+ * element type `A`.
+ *
+ * @tparam A1 A type parameter for the binary operator, a supertype of `A`.
+ * @param op A binary operator that must be associative.
+ * @return An option value containing result of applying reduce operator `op` between all
+ * the elements if the collection is nonempty, and `None` otherwise.
+ */
+ def reduceOption[A1 >: A](op: (A1, A1) => A1): Option[A1]
+
+ /** Folds the elements of this sequence using the specified associative binary operator.
+ * The order in which the elements are reduced is unspecified and may be nondeterministic.
+ *
+ * Note that this method has a different signature from the `foldLeft`
+ * and `foldRight` methods of the trait `Traversable`.
+ * The result of folding may only be a supertype of this collection's
+ * element type `A`.
+ *
+ * @tparam A1 a type parameter for the binary operator, a supertype of `A`.
+ * @param z a neutral element for the fold operation, it may be added to the result
+ * an arbitrary number of times, not changing the result (e.g. `Nil` for list concatenation,
+ * 0 for addition, or 1 for multiplication)
+ * @param op a binary operator that must be associative
+ * @return the result of applying fold operator `op` between all the elements and `z`
+ */
+ def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1
+
+ /** Syntactic sugar for out-of-order folding. See `fold`. */
+ def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op)
+
+ /** Applies a binary operator to a start value and all elements of this $coll,
+ * going left to right.
+ *
+ * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as
+ * `xs foldLeft z`.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right with the start value `z` on the left:
+ * {{{
+ * op(...op(op(z, x,,1,,), x,,2,,), ..., x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def /:[B](z: B)(op: (B, A) => B): B
+
+ /** Applies a binary operator to all elements of this $coll and a start value,
+ * going right to left.
+ *
+ * Note: `:\` is alternate syntax for `foldRight`; `xs :\ z` is the same as
+ * `xs foldRight z`.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value
+ * @param op the binary operator
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going right to left with the start value `z` on the right:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def :\[B](z: B)(op: (A, B) => B): B
+
+ /** Applies a binary operator to a start value and all elements of this $coll,
+ * going left to right.
+ *
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right with the start value `z` on the left:
+ * {{{
+ * op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
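+ *
+ * @example A minimal sketch, assuming a concrete `List`; the parentheses show the left association:
+ * {{{
+ * List(1, 2, 3).foldLeft(10)(_ - _) // assuming List: ((10 - 1) - 2) - 3 == 4
+ * }}}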
+ */
+ def foldLeft[B](z: B)(op: (B, A) => B): B
+
+ /** Applies a binary operator to all elements of this $coll and a start value,
+ * going right to left.
+ *
+ * $willNotTerminateInf
+ * $orderDependentFold
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going right to left with the start value `z` on the right:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def foldRight[B](z: B)(op: (A, B) => B): B
+
+ /** Aggregates the results of applying an operator to subsequent elements.
+ *
+ * This is a more general form of `fold` and `reduce`. It has similar semantics, but does
+ * not require the result to be a supertype of the element type. It traverses the elements in
+ * different partitions sequentially, using `seqop` to update the result, and then
+ * applies `combop` to results from different partitions. The implementation of this
+ * operation may operate on an arbitrary number of collection partitions, so `combop`
+ * may be invoked an arbitrary number of times.
+ *
+ * For example, one might want to process some elements and then produce a `Set`. In this
+ * case, `seqop` would process an element and add it to the set, while `combop`
+ * would take the union of two sets built from different partitions. The initial value
+ * `z` would be an empty set.
+ *
+ * {{{
+ * pc.aggregate(Set[Int]())(_ + process(_), _ ++ _)
+ * }}}
+ *
+ * Another example is calculating geometric mean from a collection of doubles
+ * (one would typically require big doubles for this).
+ *
+ * @tparam S the type of accumulated results
+ * @param z the initial value for the accumulated result of the partition - this
+ * will typically be the neutral element for the `seqop` operator (e.g.
+ * `Nil` for list concatenation or `0` for summation)
+ * @param seqop an operator used to accumulate results within a partition
+ * @param combop an associative operator used to combine results from different partitions
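+ *
+ * @example A minimal sketch, with `List` standing in for the abstract $coll; on a sequential
+ * collection `combop` may never be invoked:
+ * {{{
+ * List(1, 2, 3, 4).aggregate(0)(_ + _, _ + _) // assuming List: 10
+ * }}}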
+ */
+ def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B
+
+ /** Applies a binary operator to all elements of this $coll, going right to left.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going right to left:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * @throws `UnsupportedOperationException` if this $coll is empty.
+ */
+ def reduceRight[B >: A](op: (A, B) => B): B
+
+ /** Optionally applies a binary operator to all elements of this $coll, going left to right.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return an option value containing the result of `reduceLeft(op)` if this $coll is nonempty,
+ * `None` otherwise.
+ */
+ def reduceLeftOption[B >: A](op: (B, A) => B): Option[B]
+
+ /** Optionally applies a binary operator to all elements of this $coll, going
+ * right to left.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return an option value containing the result of `reduceRight(op)` if this $coll is nonempty,
+ * `None` otherwise.
+ */
+ def reduceRightOption[B >: A](op: (A, B) => B): Option[B]
+
+ /** Counts the number of elements in the $coll which satisfy a predicate.
+ *
+ * @param p the predicate used to test elements.
+ * @return the number of elements satisfying the predicate `p`.
+ */
+ def count(p: A => Boolean): Int
+
+ /** Sums up the elements of this collection.
+ *
+ * @param num an implicit parameter defining a set of numeric operations
+ * which includes the `+` operator to be used in forming the sum.
+ * @tparam A1 the result type of the `+` operator.
+ * @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
+ *
+ * @usecase def sum: A
+ *
+ * @return the sum of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `sum`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ *
+ */
+ def sum[A1 >: A](implicit num: Numeric[A1]): A1
+
+ /** Multiplies up the elements of this collection.
+ *
+ * @param num an implicit parameter defining a set of numeric operations
+ * which includes the `*` operator to be used in forming the product.
+ * @tparam A1 the result type of the `*` operator.
+ * @return the product of all elements of this $coll with respect to the `*` operator in `num`.
+ *
+ * @usecase def product: A
+ *
+ * @return the product of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `product`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ */
+ def product[A1 >: A](implicit num: Numeric[A1]): A1
+
+ /** Finds the smallest element.
+ *
+ * @param ord An ordering to be used for comparing elements.
+ * @tparam A1 The type over which the ordering is defined.
+ * @return the smallest element of this $coll with respect to the ordering `ord`.
+ *
+ * @usecase def min: A
+ * @return the smallest element of this $coll
+ */
+ def min[A1 >: A](implicit ord: Ordering[A1]): A
+
+ /** Finds the largest element.
+ *
+ * @param ord An ordering to be used for comparing elements.
+ * @tparam A1 The type over which the ordering is defined.
+ * @return the largest element of this $coll with respect to the ordering `ord`.
+ *
+ * @usecase def max: A
+ * @return the largest element of this $coll.
+ */
+ def max[A1 >: A](implicit ord: Ordering[A1]): A
+
+ def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A
+
+ def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A
+
+ def forall(pred: A => Boolean): Boolean
+
+ def exists(pred: A => Boolean): Boolean
+
+ /** Finds the first element of the $coll satisfying a predicate, if any.
+ *
+ * $mayNotTerminateInf
+ * $orderDependent
+ *
+ * @param pred the predicate used to test elements.
+ * @return an option value containing the first element in the $coll
+ * that satisfies `pred`, or `None` if none exists.
+ */
+ def find(pred: A => Boolean): Option[A]
+
+ /** Copies values of this $coll to an array.
+ * Fills the given array `xs` with values of this $coll.
+ * Copying will stop once either the end of the current $coll is reached,
+ * or the end of the array is reached.
+ *
+ * $willNotTerminateInf
+ *
+ * @param xs the array to fill.
+ * @tparam B the type of the elements of the array.
+ *
+ * @usecase def copyToArray(xs: Array[A]): Unit
+ */
+ def copyToArray[B >: A](xs: Array[B]): Unit
+
+ /** Copies values of this $coll to an array.
+ * Fills the given array `xs` with values of this $coll, after skipping `start` values.
+ * Copying will stop once either the end of the current $coll is reached,
+ * or the end of the array is reached.
+ *
+ * $willNotTerminateInf
+ *
+ * @param xs the array to fill.
+ * @param start the starting index.
+ * @tparam B the type of the elements of the array.
+ *
+ * @usecase def copyToArray(xs: Array[A], start: Int): Unit
+ */
+ def copyToArray[B >: A](xs: Array[B], start: Int): Unit
+
+ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
+
+ /** Displays all elements of this $coll in a string using start, end, and
+ * separator strings.
+ *
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return a string representation of this $coll. The resulting string
+ * begins with the string `start` and ends with the string
+ * `end`. Inside, the string representations (w.r.t. the method
+ * `toString`) of all elements of this $coll are separated by
+ * the string `sep`.
+ *
+ * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"`
+ */
+ def mkString(start: String, sep: String, end: String): String
+
+ /** Displays all elements of this $coll in a string using a separator string.
+ *
+ * @param sep the separator string.
+ * @return a string representation of this $coll. In the resulting string
+ * the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * @example `List(1, 2, 3).mkString("|") = "1|2|3"`
+ */
+ def mkString(sep: String): String
+
+ /** Displays all elements of this $coll in a string.
+ *
+ * @return a string representation of this $coll. In the resulting string
+ * the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll follow each other without any
+ * separator string.
+ */
+ def mkString: String
+
+ /** Converts this $coll to an array.
+ * $willNotTerminateInf
+ *
+ * @tparam B the type of the elements of the array. A `ClassManifest` for
+ * this type must be available.
+ * @return an array containing all elements of this $coll.
+ *
+ * @usecase def toArray: Array[A]
+ * @return an array containing all elements of this $coll.
+ * A `ClassManifest` must be available for the element type of this $coll.
+ */
+ def toArray[A1 >: A: ClassManifest]: Array[A1]
+
+ /** Converts this $coll to a list.
+ * $willNotTerminateInf
+ * @return a list containing all elements of this $coll.
+ */
+ def toList: List[A]
+
+ /** Converts this $coll to an indexed sequence.
+ * $willNotTerminateInf
+ * @return an indexed sequence containing all elements of this $coll.
+ */
+ def toIndexedSeq[A1 >: A]: immutable.IndexedSeq[A1]
+
+ /** Converts this $coll to a stream.
+ * $willNotTerminateInf
+ * @return a stream containing all elements of this $coll.
+ */
+ def toStream: Stream[A]
+
+ /** Returns an Iterator over the elements in this $coll. Will return
+ * the same Iterator if this instance is already an Iterator.
+ * $willNotTerminateInf
+ * @return an Iterator containing all elements of this $coll.
+ */
+ def toIterator: Iterator[A]
+
+ /** Converts this $coll to a mutable buffer.
+ * $willNotTerminateInf
+ * @return a buffer containing all elements of this $coll.
+ */
+ def toBuffer[A1 >: A]: collection.mutable.Buffer[A1]
+
+ /** Converts this $coll to an unspecified Traversable. Will return
+ * the same collection if this instance is already Traversable.
+ * $willNotTerminateInf
+ * @return a Traversable containing all elements of this $coll.
+ */
+ def toTraversable: GenTraversable[A]
+
+ /** Converts this $coll to an iterable collection. Note that
+ * the choice of target `Iterable` is lazy in this default implementation
+ * as this `TraversableOnce` may be lazy and unevaluated (i.e. it may
+ * be an iterator which is only traversable once).
+ *
+ * $willNotTerminateInf
+ * @return an `Iterable` containing all elements of this $coll.
+ */
+ def toIterable: GenIterable[A]
+
+ /** Converts this $coll to a sequence. As with `toIterable`, it's lazy
+ * in this default implementation, as this `TraversableOnce` may be
+ * lazy and unevaluated.
+ *
+ * $willNotTerminateInf
+ * @return a sequence containing all elements of this $coll.
+ */
+ def toSeq: GenSeq[A]
+
+ /** Converts this $coll to a set.
+ * $willNotTerminateInf
+ * @return a set containing all elements of this $coll.
+ */
+ def toSet[A1 >: A]: GenSet[A1]
+
+ /** Converts this $coll to a map. This method is unavailable unless
+ * the elements are members of `Tuple2`, each `(T, U)` becoming a key-value
+ * pair in the map. Duplicate keys will be overwritten by later keys:
+ * if this is an unordered collection, which key ends up in the
+ * resulting map is undefined.
+ * $willNotTerminateInf
+ * @return a map containing all elements of this $coll.
+ * @usecase def toMap[T, U]: Map[T, U]
+ * @return a map of type `immutable.Map[T, U]`
+ * containing all key/value pairs of type `(T, U)` of this $coll.
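+ *
+ * @example An illustrative sketch, assuming a concrete `List` of pairs:
+ * {{{
+ * List("a" -> 1, "b" -> 2).toMap // assuming List: Map(a -> 1, b -> 2)
+ * }}}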
+ */
+ def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V]
+}
diff --git a/src/library/scala/collection/GenTraversableView.scala b/src/library/scala/collection/GenTraversableView.scala
new file mode 100644
index 0000000000..e29595527b
--- /dev/null
+++ b/src/library/scala/collection/GenTraversableView.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+
+trait GenTraversableView[+A, +Coll] extends GenTraversableViewLike[A, Coll, GenTraversableView[A, Coll]] { }
+
+
diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala
new file mode 100644
index 0000000000..3d2ebf3a22
--- /dev/null
+++ b/src/library/scala/collection/GenTraversableViewLike.scala
@@ -0,0 +1,141 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+import mutable.{ Builder, ArrayBuffer }
+import TraversableView.NoBuilder
+import annotation.migration
+
+
+
+private[collection] trait GenTraversableViewLike[+A,
+ +Coll,
+ +This <: GenTraversableView[A, Coll] with GenTraversableViewLike[A, Coll, This]]
+extends GenTraversable[A] with GenTraversableLike[A, This] {
+self =>
+
+ def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]): That
+
+ protected def underlying: Coll
+ protected[this] def viewIdentifier: String
+ protected[this] def viewIdString: String
+ def viewToString = stringPrefix + viewIdString + "(...)"
+
+ /** The implementation base trait of this view.
+ * This trait and all its subtraits have to be re-implemented for each
+ * ViewLike class.
+ */
+ trait Transformed[+B] extends GenTraversableView[B, Coll] {
+ def foreach[U](f: B => U): Unit
+
+ lazy val underlying = self.underlying
+ final override protected[this] def viewIdString = self.viewIdString + viewIdentifier
+ override def stringPrefix = self.stringPrefix
+ override def toString = viewToString
+ }
+
+ trait EmptyView extends Transformed[Nothing] {
+ final override def isEmpty = true
+ final override def foreach[U](f: Nothing => U): Unit = ()
+ }
+
+ /** A fallback which forces everything into a vector and then applies an operation
+ * on it. Used for those operations which do not naturally lend themselves to a view.
+ */
+ trait Forced[B] extends Transformed[B] {
+ protected[this] val forced: GenSeq[B]
+ def foreach[U](f: B => U) = forced foreach f
+ final override protected[this] def viewIdentifier = "C"
+ }
+
+ trait Sliced extends Transformed[A] {
+ protected[this] val endpoints: SliceInterval
+ protected[this] def from = endpoints.from
+ protected[this] def until = endpoints.until
+ // protected def newSliced(_endpoints: SliceInterval): Transformed[A] =
+ // self.newSliced(endpoints.recalculate(_endpoints))
+
+ def foreach[U](f: A => U) {
+ var index = 0
+ for (x <- self) {
+ if (from <= index) {
+ if (until <= index) return
+ f(x)
+ }
+ index += 1
+ }
+ }
+ final override protected[this] def viewIdentifier = "S"
+ }
+
+ trait Mapped[B] extends Transformed[B] {
+ protected[this] val mapping: A => B
+ def foreach[U](f: B => U) {
+ for (x <- self)
+ f(mapping(x))
+ }
+ final override protected[this] def viewIdentifier = "M"
+ }
+
+ trait FlatMapped[B] extends Transformed[B] {
+ protected[this] val mapping: A => GenTraversableOnce[B]
+ def foreach[U](f: B => U) {
+ for (x <- self)
+ for (y <- mapping(x).seq)
+ f(y)
+ }
+ final override protected[this] def viewIdentifier = "N"
+ }
+
+ trait Appended[B >: A] extends Transformed[B] {
+ protected[this] val rest: GenTraversable[B]
+ def foreach[U](f: B => U) {
+ self foreach f
+ rest foreach f
+ }
+ final override protected[this] def viewIdentifier = "A"
+ }
+
+ trait Filtered extends Transformed[A] {
+ protected[this] val pred: A => Boolean
+ def foreach[U](f: A => U) {
+ for (x <- self)
+ if (pred(x)) f(x)
+ }
+ final override protected[this] def viewIdentifier = "F"
+ }
+
+ trait TakenWhile extends Transformed[A] {
+ protected[this] val pred: A => Boolean
+ def foreach[U](f: A => U) {
+ for (x <- self) {
+ if (!pred(x)) return
+ f(x)
+ }
+ }
+ final override protected[this] def viewIdentifier = "T"
+ }
+
+ trait DroppedWhile extends Transformed[A] {
+ protected[this] val pred: A => Boolean
+ def foreach[U](f: A => U) {
+ var go = false
+ for (x <- self) {
+ if (!go && !pred(x)) go = true
+ if (go) f(x)
+ }
+ }
+ final override protected[this] def viewIdentifier = "D"
+ }
+
+}
+
+
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index ef86fad313..04d89299c8 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -73,7 +73,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
if (length > 0) foldr(0, length - 1, this(length - 1), op) else super.reduceRight(op)
override /*IterableLike*/
- def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match {
+ def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match {
case that: IndexedSeq[_] =>
val b = bf(repr)
var i = 0
@@ -154,7 +154,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
def span(p: A => Boolean): (Repr, Repr) = splitAt(prefixLength(p))
override /*IterableLike*/
- def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
+ def sameElements[B >: A](that: GenIterable[B]): Boolean = that match {
case that: IndexedSeq[_] =>
val len = length
len == that.length && {
@@ -231,7 +231,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
}
override /*SeqLike*/
- def startsWith[B](that: Seq[B], offset: Int): Boolean = that match {
+ def startsWith[B](that: GenSeq[B], offset: Int): Boolean = that match {
case that: IndexedSeq[_] =>
var i = offset
var j = 0
@@ -256,7 +256,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
}
override /*SeqLike*/
- def endsWith[B](that: Seq[B]): Boolean = that match {
+ def endsWith[B](that: GenSeq[B]): Boolean = that match {
case that: IndexedSeq[_] =>
var i = length - 1
var j = that.length - 1
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index 0549ab3be8..03a8302116 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -18,16 +18,19 @@ import mutable.Builder
* $iterableInfo
*/
trait Iterable[+A] extends Traversable[A]
+ with GenIterable[A]
with GenericTraversableTemplate[A, Iterable]
with IterableLike[A, Iterable[A]] {
- override def companion: GenericCompanion[Iterable] = Iterable
+ override def companion: GenericCompanion[Iterable] = Iterable
+
+ override def seq = this
/* The following methods are inherited from trait IterableLike
*
override def iterator: Iterator[A]
override def takeRight(n: Int): Iterable[A]
override def dropRight(n: Int): Iterable[A]
- override def sameElements[B >: A](that: Iterable[B]): Boolean
+ override def sameElements[B >: A](that: GenIterable[B]): Boolean
override def view
override def view(from: Int, until: Int)
*/
@@ -47,13 +50,13 @@ object Iterable extends TraversableFactory[Iterable] {
def newBuilder[A]: Builder[A, Iterable[A]] = immutable.Iterable.newBuilder[A]
/** The minimum element of a non-empty sequence of ordered elements */
- @deprecated("use <seq>.min instead, where <seq> is the sequence for which you want to compute the minimum")
+ @deprecated("use <seq>.min instead, where <seq> is the sequence for which you want to compute the minimum", "2.8.0")
def min[A](seq: Iterable[A])(implicit ord: Ordering[A]): A = seq.min
/** The maximum element of a non-empty sequence of ordered elements */
- @deprecated("use <seq>.max instead, where <seq> is the sequence for which you want to compute the maximum")
+ @deprecated("use <seq>.max instead, where <seq> is the sequence for which you want to compute the maximum", "2.8.0")
def max[A](seq: Iterable[A])(implicit ord: Ordering[A]): A = seq.max
- @deprecated("use View instead")
+ @deprecated("use View instead", "2.8.0")
type Projection[A] = IterableView[A, Coll]
}
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 062b3c207f..458b26207e 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -8,10 +8,12 @@
package scala.collection
+
import generic._
import immutable.{ List, Stream }
import annotation.unchecked.uncheckedVariance
+
/** A template trait for iterable collections of type `Iterable[A]`.
* $iterableInfo
* @define iterableInfo
@@ -46,16 +48,8 @@ import annotation.unchecked.uncheckedVariance
*
* @define Coll Iterable
* @define coll iterable collection
- * @define zipthatinfo the class of the returned collection. Where possible, `That` is
- * the same class as the current collection class `Repr`, but this
- * depends on the element type `(A1, B)` being admissible for that class,
- * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, B), That]`.
- * is found.
- * @define zipbfinfo an implicit value of class `CanBuildFrom` which determines the
- * result class `That` from the current representation type `Repr`
- * and the new element type `(A1, B)`.
*/
-trait IterableLike[+A, +Repr] extends Equals with TraversableLike[A, Repr] {
+trait IterableLike[+A, +Repr] extends Equals with TraversableLike[A, Repr] with GenIterableLike[A, Repr] {
self =>
override protected[this] def thisCollection: Iterable[A] = this.asInstanceOf[Iterable[A]]
@@ -194,31 +188,7 @@ self =>
}
}
- /** Returns a $coll formed from this $coll and another iterable collection
- * by combining corresponding elements in pairs.
- * If one of the two collections is longer than the other, its remaining elements are ignored.
- *
- * $orderDependent
- *
- * @param that The iterable providing the second half of each result pair
- * @tparam A1 the type of the first half of the returned pairs (this is always a supertype
- * of the collection's element type `A`).
- * @tparam B the type of the second half of the returned pairs
- * @tparam That $zipthatinfo
- * @param bf $zipbfinfo
- * @return a new collection of type `That` containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the minimum of the lengths of this $coll and `that`.
- *
- * @usecase def zip[B](that: Iterable[B]): $Coll[(A, B)]
- *
- * @param that The iterable providing the second half of each result pair
- * @tparam B the type of the second half of the returned pairs
- * @return a new $coll containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the minimum of the lengths of this $coll and `that`.
- */
- def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
+ def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
val b = bf(repr)
val these = this.iterator
val those = that.iterator
@@ -227,35 +197,7 @@ self =>
b.result
}
- /** Returns a $coll formed from this $coll and another iterable collection
- * by combining corresponding elements in pairs.
- * If one of the two collections is shorter than the other,
- * placeholder elements are used to extend the shorter collection to the length of the longer.
- *
- * $orderDependent
- *
- * @param that the iterable providing the second half of each result pair
- * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
- * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
- * @return a new collection of type `That` containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the maximum of the lengths of this $coll and `that`.
- * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
- * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
- *
- * @usecase def zipAll[B](that: Iterable[B], thisElem: A, thatElem: B): $Coll[(A, B)]
- *
- * @param that The iterable providing the second half of each result pair
- * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
- * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
- * @tparam B the type of the second half of the returned pairs
- * @return a new $coll containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the maximum of the lengths of this $coll and `that`.
- * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
- * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
- */
- def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
+ def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
val b = bf(repr)
val these = this.iterator
val those = that.iterator
@@ -268,31 +210,6 @@ self =>
b.result
}
- /** Zips this $coll with its indices.
- *
- * $orderDependent
- *
- * @tparam A1 the type of the first half of the returned pairs (this is always a supertype
- * of the collection's element type `A`).
- * @tparam That the class of the returned collection. Where possible, `That` is
- * the same class as the current collection class `Repr`, but this
- * depends on the element type `(A1, Int)` being admissible for that class,
- * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, Int), That]`.
- * is found.
- * @tparam bf an implicit value of class `CanBuildFrom` which determines the
- * result class `That` from the current representation type `Repr`
- * and the new element type `(A1, Int)`.
- * @return A new collection of type `That` containing pairs consisting of all elements of this
- * $coll paired with their index. Indices start at `0`.
- *
- * @usecase def zipWithIndex: $Coll[(A, Int)]
- *
- * @return A new $coll containing pairs consisting of all elements of this
- * $coll paired with their index. Indices start at `0`.
- * @example
- * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
- *
- */
def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
val b = bf(repr)
var i = 0
@@ -303,21 +220,7 @@ self =>
b.result
}
- /** Checks if the other iterable collection contains the same elements in the same order as this $coll.
- *
- * $orderDependent
- * $willNotTerminateInf
- *
- * @param that the collection to compare with.
- * @tparam B the type of the elements of collection `that`.
- * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
- *
- * @usecase def sameElements(that: Iterable[A]): Boolean
- *
- * @param that the collection to compare with.
- * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
- */
- def sameElements[B >: A](that: Iterable[B]): Boolean = {
+ def sameElements[B >: A](that: GenIterable[B]): Boolean = {
val these = this.iterator
val those = that.iterator
while (these.hasNext && those.hasNext)
@@ -344,20 +247,20 @@ self =>
override /*TraversableLike*/ def view(from: Int, until: Int) = view.slice(from, until)
- @deprecated("use `iterator' instead")
+ @deprecated("use `iterator' instead", "2.8.0")
def elements = iterator
- @deprecated("use `head' instead") def first: A = head
+ @deprecated("use `head' instead", "2.8.0") def first: A = head
/** `None` if iterable is empty.
*/
- @deprecated("use `headOption' instead") def firstOption: Option[A] = headOption
+ @deprecated("use `headOption' instead", "2.8.0") def firstOption: Option[A] = headOption
/**
* returns a projection that can be used to call non-strict `filter`,
* `map`, and `flatMap` methods that build projections
* of the collection.
*/
- @deprecated("use `view' instead")
+ @deprecated("use `view' instead", "2.8.0")
def projection = view
}
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 6bc0f155a8..0fe501ac6b 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -31,10 +31,10 @@ trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]]
override def sliding[B >: A](size: Int, step: Int): Iterator[Repr] = self.sliding(size, step)
override def takeRight(n: Int): Repr = self.takeRight(n)
override def dropRight(n: Int): Repr = self.dropRight(n)
- override def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zip[A1, B, That](that)(bf)
- override def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf)
+ override def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zip[A1, B, That](that)(bf)
+ override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf)
override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = self.zipWithIndex(bf)
- override def sameElements[B >: A](that: Iterable[B]): Boolean = self.sameElements(that)
+ override def sameElements[B >: A](that: GenIterable[B]): Boolean = self.sameElements(that)
override def view = self.view
override def view(from: Int, until: Int) = self.view(from, until)
}
diff --git a/src/library/scala/collection/IterableView.scala b/src/library/scala/collection/IterableView.scala
index 0cde62fb99..9ff6762ae8 100644
--- a/src/library/scala/collection/IterableView.scala
+++ b/src/library/scala/collection/IterableView.scala
@@ -16,7 +16,7 @@ import TraversableView.NoBuilder
/** A base trait for non-strict views of `Iterable`s.
* $iterableViewInfo
*/
-trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]]
+trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]] with GenIterableView[A, Coll]
/** An object containing the necessary implicit definitions to make
* `IterableView`s work. Its definitions are generally not accessed directly by clients.
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 6acef1889f..e0e1329844 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -32,96 +32,68 @@ trait IterableViewLike[+A,
with IterableLike[A, This]
with TraversableView[A, Coll]
with TraversableViewLike[A, Coll, This]
+ with GenIterableViewLike[A, Coll, This]
{ self =>
- trait Transformed[+B] extends IterableView[B, Coll] with super.Transformed[B] {
+ trait Transformed[+B] extends IterableView[B, Coll] with super[TraversableViewLike].Transformed[B] with super[GenIterableViewLike].Transformed[B] {
def iterator: Iterator[B]
override def foreach[U](f: B => U): Unit = iterator foreach f
override def toString = viewToString
}
- trait EmptyView extends Transformed[Nothing] with super.EmptyView {
- final def iterator: Iterator[Nothing] = Iterator.empty
- }
+ trait EmptyView extends Transformed[Nothing] with super[TraversableViewLike].EmptyView with super[GenIterableViewLike].EmptyView
- trait Forced[B] extends super.Forced[B] with Transformed[B] {
- def iterator = forced.iterator
- }
+ trait Forced[B] extends super[TraversableViewLike].Forced[B] with super[GenIterableViewLike].Forced[B] with Transformed[B]
- trait Sliced extends super.Sliced with Transformed[A] {
- def iterator: Iterator[A] = self.iterator.slice(from, until)
- }
+ trait Sliced extends super[TraversableViewLike].Sliced with super[GenIterableViewLike].Sliced with Transformed[A]
- trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
- def iterator = self.iterator map mapping
- }
+ trait Mapped[B] extends super[TraversableViewLike].Mapped[B] with super[GenIterableViewLike].Mapped[B] with Transformed[B]
- trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
- def iterator: Iterator[B] = self.iterator flatMap mapping
- }
+ trait FlatMapped[B] extends super[TraversableViewLike].FlatMapped[B] with super[GenIterableViewLike].FlatMapped[B] with Transformed[B]
- trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
- def iterator = self.iterator ++ rest
- }
+ trait Appended[B >: A] extends super[TraversableViewLike].Appended[B] with super[GenIterableViewLike].Appended[B] with Transformed[B]
- trait Filtered extends super.Filtered with Transformed[A] {
- def iterator = self.iterator filter pred
- }
+ trait Filtered extends super[TraversableViewLike].Filtered with super[GenIterableViewLike].Filtered with Transformed[A]
- trait TakenWhile extends super.TakenWhile with Transformed[A] {
- def iterator = self.iterator takeWhile pred
- }
+ trait TakenWhile extends super[TraversableViewLike].TakenWhile with super[GenIterableViewLike].TakenWhile with Transformed[A]
- trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
- def iterator = self.iterator dropWhile pred
- }
+ trait DroppedWhile extends super[TraversableViewLike].DroppedWhile with super[GenIterableViewLike].DroppedWhile with Transformed[A]
- trait Zipped[B] extends Transformed[(A, B)] {
- protected[this] val other: Iterable[B]
- def iterator: Iterator[(A, B)] = self.iterator zip other.iterator
- final override protected[this] def viewIdentifier = "Z"
- }
-
- trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] {
- protected[this] val other: Iterable[B]
- protected[this] val thisElem: A1
- protected[this] val thatElem: B
- final override protected[this] def viewIdentifier = "Z"
- def iterator: Iterator[(A1, B)] =
- self.iterator.zipAll(other.iterator, thisElem, thatElem)
- }
+ trait Zipped[B] extends Transformed[(A, B)] with super[GenIterableViewLike].Zipped[B]
- override def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That = {
- newZipped(that).asInstanceOf[That]
-// was: val b = bf(repr)
-// if (b.isInstanceOf[NoBuilder[_]]) newZipped(that).asInstanceOf[That]
-// else super.zip[A1, B, That](that)(bf)
- }
-
- override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That =
- zip[A1, Int, That](Stream from 0)(bf)
-
- override def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That =
- newZippedAll(that, thisElem, thatElem).asInstanceOf[That]
+ trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] with super[GenIterableViewLike].ZippedAll[A1, B]
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
- protected def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
- protected def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
- val other: Iterable[B] = that
+ protected def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
+ protected def newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
+ val other: GenIterable[B] = that
val thisElem = _thisElem
val thatElem = _thatElem
} with ZippedAll[A1, B]
- protected override def newForced[B](xs: => Seq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
+ protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
+ protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected override def newFlatMapped[B](f: A => TraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
+ override def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That = {
+ newZipped(that).asInstanceOf[That]
+// was: val b = bf(repr)
+// if (b.isInstanceOf[NoBuilder[_]]) newZipped(that).asInstanceOf[That]
+// else super.zip[A1, B, That](that)(bf)
+ }
+
+ override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That =
+ zip[A1, Int, That](Stream from 0)(bf)
+
+ override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That =
+ newZippedAll(that, thisElem, thatElem).asInstanceOf[That]
+
override def grouped(size: Int): Iterator[This] =
self.iterator grouped size map (x => newForced(x).asInstanceOf[This])
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 4e349cb423..7bd33cbb23 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -149,13 +149,13 @@ object Iterator {
def next = elem
}
- @deprecated("use `xs.iterator' or `Iterator(xs)' instead")
+ @deprecated("use `xs.iterator' or `Iterator(xs)' instead", "2.8.0")
def fromValues[a](xs: a*) = xs.iterator
/** @param xs the array of elements
* @see also: IndexedSeq.iterator and slice
*/
- @deprecated("use `xs.iterator' instead")
+ @deprecated("use `xs.iterator' instead", "2.8.0")
def fromArray[a](xs: Array[a]): Iterator[a] =
fromArray(xs, 0, xs.length)
@@ -165,7 +165,7 @@ object Iterator {
* @param length the length
* @see also: IndexedSeq.iterator and slice
*/
- @deprecated("use `xs.slice(start, start + length).iterator' instead")
+ @deprecated("use `xs.slice(start, start + length).iterator' instead", "2.8.0")
def fromArray[a](xs: Array[a], start: Int, length: Int): Iterator[a] =
xs.slice(start, start + length).iterator
@@ -173,7 +173,7 @@ object Iterator {
* @param n the product arity
* @return the iterator on `Product<n>`.
*/
- @deprecated("use product.productIterator instead")
+ @deprecated("use product.productIterator instead", "2.8.0")
def fromProduct(n: Product): Iterator[Any] = new Iterator[Any] {
private var c: Int = 0
private val cmax = n.productArity
@@ -192,7 +192,7 @@ object Iterator {
* @param step the increment function of the iterator, must be monotonically increasing or decreasing
* @return the iterator with values in range `[start;end)`.
*/
- @deprecated("use Iterator.iterate(start, end - start)(step) instead")
+ @deprecated("use Iterator.iterate(start, end - start)(step) instead", "2.8.0")
def range(start: Int, end: Int, step: Int => Int) = new Iterator[Int] {
private val up = step(start) > start
private val down = step(start) < start
@@ -211,7 +211,7 @@ object Iterator {
* @param step the increment function of the iterator
* @return the iterator starting at value `start`.
*/
- @deprecated("use iterate(start)(step) instead")
+ @deprecated("use iterate(start)(step) instead", "2.8.0")
def from(start: Int, step: Int => Int): Iterator[Int] = new Iterator[Int] {
private var i = start
override def hasNext: Boolean = true
@@ -223,7 +223,7 @@ object Iterator {
* @param its The iterator which returns on each call to next
* a new iterator whose elements are to be concatenated to the result.
*/
- @deprecated("use its.flatten instead")
+ @deprecated("use its.flatten instead", "2.8.0")
def flatten[T](its: Iterator[Iterator[T]]): Iterator[T] = new Iterator[T] {
private var cur = its.next
def hasNext: Boolean = {
@@ -253,6 +253,8 @@ import Iterator.empty
trait Iterator[+A] extends TraversableOnce[A] {
self =>
+ def seq: Iterator[A] = this
+
/** Tests whether this iterator can provide another element.
* @return `true` if a subsequent call to `next` will yield an element,
* `false` otherwise.
@@ -340,7 +342,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* iterator followed by the values produced by iterator `that`.
* @usecase def ++(that: => Iterator[A]): Iterator[A]
*/
- def ++[B >: A](that: => TraversableOnce[B]): Iterator[B] = new Iterator[B] {
+ def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator[B] {
// optimize a little bit to prevent n log n behavior.
private var cur : Iterator[B] = self
// since that is by-name, make sure it's only referenced once -
@@ -364,7 +366,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @return the iterator resulting from applying the given iterator-valued function
* `f` to each value produced by this iterator and concatenating the results.
*/
- def flatMap[B](f: A => TraversableOnce[B]): Iterator[B] = new Iterator[B] {
+ def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new Iterator[B] {
private var cur: Iterator[B] = empty
def hasNext: Boolean =
cur.hasNext || self.hasNext && { cur = f(self.next).toIterator; hasNext }
@@ -1024,16 +1026,16 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Returns a new iterator that first yields the elements of this
* iterator followed by the elements provided by iterator `that`.
*/
- @deprecated("use `++`")
+ @deprecated("use `++`", "2.3.2")
def append[B >: A](that: Iterator[B]) = self ++ that
/** Returns index of the first element satisfying a predicate, or -1. */
- @deprecated("use `indexWhere` instead")
+ @deprecated("use `indexWhere` instead", "2.8.0")
def findIndexOf(p: A => Boolean): Int = indexWhere(p)
/** Returns a counted iterator from this iterator.
*/
- @deprecated("use zipWithIndex in Iterator")
+ @deprecated("use zipWithIndex in Iterator", "2.8.0")
def counted = new CountedIterator[A] {
private var cnt = 0
def count = cnt
@@ -1050,7 +1052,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @param start the starting index.
* @param sz the maximum number of elements to be read.
*/
- @deprecated("use copyToArray instead")
+ @deprecated("use copyToArray instead", "2.8.0")
def readInto[B >: A](xs: Array[B], start: Int, sz: Int) {
var i = start
while (hasNext && i - start < sz) {
@@ -1059,12 +1061,12 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
}
- @deprecated("use copyToArray instead")
+ @deprecated("use copyToArray instead", "2.8.0")
def readInto[B >: A](xs: Array[B], start: Int) {
readInto(xs, start, xs.length - start)
}
- @deprecated("use copyToArray instead")
+ @deprecated("use copyToArray instead", "2.8.0")
def readInto[B >: A](xs: Array[B]) {
readInto(xs, 0, xs.length)
}
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index d71208f752..fdb77a48c1 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -73,7 +73,7 @@ object JavaConversions {
case _ => IteratorWrapper(i)
}
- @deprecated("use asJavaIterator instead")
+ @deprecated("use asJavaIterator instead", "2.8.1")
def asIterator[A](i : Iterator[A]): ju.Iterator[A] = asJavaIterator[A](i)
/**
@@ -94,7 +94,7 @@ object JavaConversions {
case _ => IteratorWrapper(i)
}
- @deprecated("use asJavaEnmeration instead")
+ @deprecated("use asJavaEnmeration instead", "2.8.1")
def asEnumeration[A](i : Iterator[A]): ju.Enumeration[A] = asJavaEnumeration[A](i)
/**
@@ -115,7 +115,7 @@ object JavaConversions {
case _ => IterableWrapper(i)
}
- @deprecated("use asJavaIterable instead")
+ @deprecated("use asJavaIterable instead", "2.8.1")
def asIterable[A](i : Iterable[A]): jl.Iterable[A] = asJavaIterable[A](i)
/**
@@ -134,7 +134,7 @@ object JavaConversions {
case _ => new IterableWrapper(i)
}
- @deprecated("use asJavaCollection instead")
+ @deprecated("use asJavaCollection instead", "2.8.1")
def asCollection[A](i : Iterable[A]): ju.Collection[A] = asJavaCollection[A](i)
/**
@@ -154,9 +154,9 @@ object JavaConversions {
case JListWrapper(wrapped) => wrapped
case _ => new MutableBufferWrapper(b)
}
- @deprecated("use bufferAsJavaList instead")
+ @deprecated("use bufferAsJavaList instead", "2.9.0")
def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b)
- @deprecated("use bufferAsJavaList instead")
+ @deprecated("use bufferAsJavaList instead", "2.8.1")
def asList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b)
/**
@@ -176,9 +176,9 @@ object JavaConversions {
case JListWrapper(wrapped) => wrapped
case _ => new MutableSeqWrapper(b)
}
- @deprecated("use mutableSeqAsJavaList instead")
+ @deprecated("use mutableSeqAsJavaList instead", "2.9.0")
def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b)
- @deprecated("use mutableSeqAsJavaList instead")
+ @deprecated("use mutableSeqAsJavaList instead", "2.8.1")
def asList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b)
/**
@@ -199,9 +199,9 @@ object JavaConversions {
case _ => new SeqWrapper(b)
}
- @deprecated("use seqAsJavaList instead")
+ @deprecated("use seqAsJavaList instead", "2.9.0")
def asJavaList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b)
- @deprecated("use seqAsJavaList instead")
+ @deprecated("use seqAsJavaList instead", "2.8.1")
def asList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b)
/**
@@ -222,9 +222,9 @@ object JavaConversions {
case _ => new MutableSetWrapper(s)
}
- @deprecated("use mutableSetAsJavaSet instead")
+ @deprecated("use mutableSetAsJavaSet instead", "2.9.0")
def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s)
- @deprecated("use mutableSetAsJavaSet instead")
+ @deprecated("use mutableSetAsJavaSet instead", "2.8.1")
def asSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s)
/**
@@ -245,9 +245,9 @@ object JavaConversions {
case _ => new SetWrapper(s)
}
- @deprecated("use setAsJavaSet instead")
+ @deprecated("use setAsJavaSet instead", "2.9.0")
def asJavaSet[A](s: Set[A]): ju.Set[A] = setAsJavaSet[A](s)
- @deprecated("use setAsJavaSet instead")
+ @deprecated("use setAsJavaSet instead", "2.8.1")
def asSet[A](s : Set[A]): ju.Set[A] = setAsJavaSet[A](s)
/**
@@ -269,9 +269,9 @@ object JavaConversions {
case _ => new MutableMapWrapper(m)
}
- @deprecated("use mutableMapAsJavaMap instead")
+ @deprecated("use mutableMapAsJavaMap instead", "2.9.0")
def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m)
- @deprecated("use mutableMapAsJavaMap instead")
+ @deprecated("use mutableMapAsJavaMap instead", "2.8.1")
def asMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m)
/**
@@ -293,7 +293,7 @@ object JavaConversions {
case _ => new DictionaryWrapper(m)
}
- @deprecated("use asJavaDictionary instead")
+ @deprecated("use asJavaDictionary instead", "2.8.1")
def asDictionary[A, B](m : mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary[A, B](m)
/**
@@ -315,9 +315,9 @@ object JavaConversions {
case _ => new MapWrapper(m)
}
- @deprecated("use mapAsJavaMap instead")
+ @deprecated("use mapAsJavaMap instead", "2.9.0")
def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m)
- @deprecated("use mapAsJavaMap instead")
+ @deprecated("use mapAsJavaMap instead", "2.8.1")
def asMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m)
/**
@@ -338,7 +338,7 @@ object JavaConversions {
case _ => new ConcurrentMapWrapper(m)
}
- @deprecated("use asJavaConcurrentMap instead")
+ @deprecated("use asJavaConcurrentMap instead", "2.8.1")
def asConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = asJavaConcurrentMap[A, B](m)
// Java => Scala
@@ -361,7 +361,7 @@ object JavaConversions {
case _ => JIteratorWrapper(i)
}
- @deprecated("use asScalaIterator instead")
+ @deprecated("use asScalaIterator instead", "2.8.1")
def asIterator[A](i : ju.Iterator[A]): Iterator[A] = asScalaIterator[A](i)
/**
@@ -382,7 +382,7 @@ object JavaConversions {
case _ => JEnumerationWrapper(i)
}
- @deprecated("use enumerationAsScalaIterator instead")
+ @deprecated("use enumerationAsScalaIterator instead", "2.8.1")
def asIterator[A](i : ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator[A](i)
/**
@@ -403,9 +403,9 @@ object JavaConversions {
case _ => JIterableWrapper(i)
}
- @deprecated("use iterableAsScalaIterable instead")
+ @deprecated("use iterableAsScalaIterable instead", "2.9.0")
def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i)
- @deprecated("use iterableAsScalaIterable instead")
+ @deprecated("use iterableAsScalaIterable instead", "2.8.1")
def asIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i)
/**
@@ -422,9 +422,9 @@ object JavaConversions {
case IterableWrapper(wrapped) => wrapped
case _ => JCollectionWrapper(i)
}
- @deprecated("use collectionAsScalaIterable instead")
+ @deprecated("use collectionAsScalaIterable instead", "2.9.0")
def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i)
- @deprecated("use collectionAsScalaIterable instead")
+ @deprecated("use collectionAsScalaIterable instead", "2.8.1")
def asIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i)
/**
@@ -445,7 +445,7 @@ object JavaConversions {
case _ =>new JListWrapper(l)
}
- @deprecated("use asScalaBuffer instead")
+ @deprecated("use asScalaBuffer instead", "2.8.1")
def asBuffer[A](l : ju.List[A]): mutable.Buffer[A] = asScalaBuffer[A](l)
/**
@@ -466,7 +466,7 @@ object JavaConversions {
case _ =>new JSetWrapper(s)
}
- @deprecated("use asScalaSet instead")
+ @deprecated("use asScalaSet instead", "2.8.1")
def asSet[A](s : ju.Set[A]): mutable.Set[A] = asScalaSet[A](s)
/**
@@ -488,9 +488,9 @@ object JavaConversions {
case _ => new JMapWrapper(m)
}
- @deprecated("use mapAsScalaMap instead")
+ @deprecated("use mapAsScalaMap instead", "2.9.0")
def asScalaMap[A, B](m : ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
- @deprecated("use mapAsScalaMap instead")
+ @deprecated("use mapAsScalaMap instead", "2.8.1")
def asMap[A, B](m : ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
/**
@@ -511,7 +511,7 @@ object JavaConversions {
case _ => new JConcurrentMapWrapper(m)
}
- @deprecated("use asScalaConcurrentMap instead")
+ @deprecated("use asScalaConcurrentMap instead", "2.8.1")
def asConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = asScalaConcurrentMap[A, B](m)
/**
@@ -528,7 +528,7 @@ object JavaConversions {
case _ => new JDictionaryWrapper(p)
}
- @deprecated("use dictionaryAsScalaMap instead")
+ @deprecated("use dictionaryAsScalaMap instead", "2.8.1")
def asMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = dictionaryAsScalaMap[A, B](p)
/**
@@ -544,9 +544,9 @@ object JavaConversions {
case _ => new JPropertiesWrapper(p)
}
- @deprecated("use propertiesAsScalaMap instead")
+ @deprecated("use propertiesAsScalaMap instead", "2.9.0")
def asScalaMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
- @deprecated("use propertiesAsScalaMap instead")
+ @deprecated("use propertiesAsScalaMap instead", "2.8.1")
def asMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
// Private implementations (shared by JavaConverters) ...
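For readers tracking the renames above, a minimal usage sketch of the new, non-deprecated names (assuming Scala 2.9 with the standard library on the classpath; the sample values are illustrative):

    import scala.collection.{ mutable, JavaConversions }
    import java.{ util => ju }

    // Scala => Java: the explicit forms the deprecated asJavaList/asMap now delegate to.
    val jList: ju.List[Int]          = JavaConversions.bufferAsJavaList(mutable.Buffer(1, 2, 3))
    val jMap:  ju.Map[String, Int]   = JavaConversions.mapAsJavaMap(Map("a" -> 1))

    // Java => Scala: the explicit forms the deprecated asBuffer/asMap now delegate to.
    val sBuf: mutable.Buffer[Int]      = JavaConversions.asScalaBuffer(jList)
    val sMap: mutable.Map[String, Int] = JavaConversions.mapAsScalaMap(jMap)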
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index b68578fd21..4bb61b960a 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -210,11 +210,11 @@ object JavaConverters {
implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
new AsJava(seqAsJavaList(b))
- @deprecated("Use bufferAsJavaListConverter instead")
+ @deprecated("Use bufferAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = bufferAsJavaListConverter(b)
- @deprecated("Use mutableSeqAsJavaListConverter instead")
+ @deprecated("Use mutableSeqAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = mutableSeqAsJavaListConverter(b)
- @deprecated("Use seqAsJavaListConverter instead")
+ @deprecated("Use seqAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = seqAsJavaListConverter(b)
/**
@@ -233,7 +233,7 @@ object JavaConverters {
implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
new AsJava(mutableSetAsJavaSet(s))
- @deprecated("Use mutableSetAsJavaSetConverter instead")
+ @deprecated("Use mutableSetAsJavaSetConverter instead", "2.9.0")
def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = mutableSetAsJavaSetConverter(s)
/**
@@ -252,7 +252,7 @@ object JavaConverters {
implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
new AsJava(setAsJavaSet(s))
- @deprecated("Use setAsJavaSetConverter instead")
+ @deprecated("Use setAsJavaSetConverter instead", "2.9.0")
def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = setAsJavaSetConverter(s)
/**
@@ -271,7 +271,7 @@ object JavaConverters {
implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] =
new AsJava(mutableMapAsJavaMap(m))
- @deprecated("use mutableMapAsJavaMapConverter instead")
+ @deprecated("use mutableMapAsJavaMapConverter instead", "2.9.0")
def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = mutableMapAsJavaMapConverter(m)
/**
@@ -306,7 +306,7 @@ object JavaConverters {
implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] =
new AsJava(mapAsJavaMap(m))
- @deprecated("Use mapAsJavaMapConverter instead")
+ @deprecated("Use mapAsJavaMapConverter instead", "2.9.0")
def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = mapAsJavaMapConverter(m)
/**
@@ -373,7 +373,7 @@ object JavaConverters {
implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
new AsScala(iterableAsScalaIterable(i))
- @deprecated("Use iterableAsScalaIterableConverter instead")
+ @deprecated("Use iterableAsScalaIterableConverter instead", "2.9.0")
def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = iterableAsScalaIterableConverter(i)
/**
@@ -389,7 +389,7 @@ object JavaConverters {
implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
new AsScala(collectionAsScalaIterable(i))
- @deprecated("Use collectionAsScalaIterableConverter instead")
+ @deprecated("Use collectionAsScalaIterableConverter instead", "2.9.0")
def asScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = collectionAsScalaIterableConverter(i)
/**
@@ -440,7 +440,7 @@ object JavaConverters {
implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] =
new AsScala(mapAsScalaMap(m))
- @deprecated("Use mapAsScalaMapConverter instead")
+ @deprecated("Use mapAsScalaMapConverter instead", "2.9.0")
def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = mapAsScalaMapConverter(m)
/**
@@ -483,7 +483,7 @@ object JavaConverters {
implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
new AsScala(propertiesAsScalaMap(p))
- @deprecated("Use propertiesAsScalaMapConverter instead")
+ @deprecated("Use propertiesAsScalaMapConverter instead", "2.9.0")
def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
propertiesAsScalaMapConverter(p)
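The converter methods above are normally reached through their decorators rather than called directly; a minimal sketch (assuming Scala 2.9, where the AsJava/AsScala wrappers expose asJava/asScala):

    import scala.collection.JavaConverters._
    import java.{ util => ju }

    val jSet: ju.Set[String] = Set("a", "b").asJava              // via setAsJavaSetConverter
    val sMap: scala.collection.mutable.Map[String, Int] =
      new ju.HashMap[String, Int]().asScala                      // via mapAsScalaMapConverter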
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index a1becf8d68..7c5cba0665 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -232,7 +232,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
override /*IterableLike*/
- def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
+ def sameElements[B >: A](that: GenIterable[B]): Boolean = that match {
case that1: LinearSeq[_] =>
var these = this
var those = that1
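With the parameter widened to GenIterable, sameElements on a list-like sequence now also accepts parallel collections; a small sketch (collections chosen for illustration):

    val xs = List(1, 2, 3)
    xs sameElements List(1, 2, 3)      // true; takes the LinearSeq fast path above
    xs sameElements Vector(1, 2, 3)    // true; falls through to the generic comparison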
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 7dcaafe45d..71b2e7fb1e 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -24,9 +24,9 @@ import generic._
* @tparam A the type of the keys in this map.
* @tparam B the type of the values associated with keys.
 *
 * @since 1.0
*/
-trait Map[A, +B] extends Iterable[(A, B)] with MapLike[A, B, Map[A, B]] {
+trait Map[A, +B] extends Iterable[(A, B)] with GenMap[A, B] with MapLike[A, B, Map[A, B]] {
def empty: Map[A, B] = Map.empty
override def seq: Map[A, B] = this
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index fd58540086..a832f9089d 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -57,8 +57,10 @@ import parallel.ParMap
trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
extends PartialFunction[A, B]
with IterableLike[(A, B), This]
+ with GenMapLike[A, B, This]
with Subtractable[A, This]
- with Parallelizable[(A, B), ParMap[A, B]] {
+ with Parallelizable[(A, B), ParMap[A, B]]
+{
self =>
/** The empty map of the same type as this map
@@ -246,7 +248,8 @@ self =>
def get(key: A) = self.get(key).map(f)
}
- @deprecated("use `mapValues' instead") def mapElements[C](f: B => C) = mapValues(f)
+ @deprecated("use `mapValues' instead", "2.8.0")
+ def mapElements[C](f: B => C) = mapValues(f)
// The following 5 operations (updated, two times +, two times ++) should really be
// generic, returning This[B]. We need better covariance support to express that though.
@@ -284,8 +287,8 @@ self =>
* @return a new map with the given bindings added to this map
* @usecase def ++ (xs: Traversable[(A, B)]): Map[A, B]
*/
- def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] =
- ((repr: Map[A, B1]) /: xs) (_ + _)
+ def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
+ ((repr: Map[A, B1]) /: xs.seq) (_ + _)
/** Returns a new map with all key/value pairs for which the predicate
* `p` returns `true`.
@@ -338,36 +341,4 @@ self =>
override /*PartialFunction*/
def toString = super[IterableLike].toString
- // This hash code must be symmetric in the contents but ought not
- // collide trivially.
- override def hashCode() = util.MurmurHash.symmetricHash(this,Map.hashSeed)
-
- /** Compares two maps structurally; i.e. checks if all mappings
- * contained in this map are also contained in the other map,
- * and vice versa.
- *
- * @param that the other map
- * @return `true` if both maps contain exactly the
- * same mappings, `false` otherwise.
- */
- override def equals(that: Any): Boolean = that match {
- case that: Map[b, _] =>
- (this eq that) ||
- (that canEqual this) &&
- (this.size == that.size) && {
- try {
- this forall {
- case (k, v) => that.get(k.asInstanceOf[b]) match {
- case Some(`v`) =>
- true
- case _ => false
- }
- }
- } catch {
- case ex: ClassCastException =>
- println("class cast "); false
- }}
- case _ =>
- false
- }
}
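The new `++` above folds the bindings of any GenTraversableOnce into this map through `.seq`, one `+` at a time; a worked sketch of that same fold (values illustrative):

    val base: Map[Char, Int] = Map('a' -> 1)
    val more = List('b' -> 2, 'c' -> 3)
    val viaFold = (base /: more.seq)(_ + _)   // Map(a -> 1, b -> 2, c -> 3), mirroring the fold in ++
    val viaPlus = base ++ more                // same result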
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index 7511897f36..7352c01ee1 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -42,7 +42,7 @@ trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
override def mapValues[C](f: B => C) = self.mapValues(f)
override def updated [B1 >: B](key: A, value: B1): Map[A, B1] = self.updated(key, value)
override def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] = self.+(kv1, kv2, kvs: _*)
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] = self.++(xs)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] = self.++(xs)
override def filterNot(p: ((A, B)) => Boolean) = self filterNot p
override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder =
diff --git a/src/library/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala
index 037abde2b6..930c3394dd 100644
--- a/src/library/scala/collection/Parallel.scala
+++ b/src/library/scala/collection/Parallel.scala
@@ -8,9 +8,9 @@
package scala.collection
-/** A marker trait for objects with parallelised operations.
+/** A marker trait for collections which have their operations parallelised.
*
* @since 2.9
- * @author prokopec
+ * @author Aleksandar Prokopec
*/
trait Parallel
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
index 10ab248626..59b37aff96 100644
--- a/src/library/scala/collection/Parallelizable.scala
+++ b/src/library/scala/collection/Parallelizable.scala
@@ -18,7 +18,8 @@ import parallel.Combiner
* @tparam ParRepr the actual type of the collection, which has to be parallel
*/
trait Parallelizable[+A, +ParRepr <: Parallel] {
- self: TraversableOnce[A] =>
+
+ def seq: TraversableOnce[A]
/** Returns a parallel implementation of this collection.
*
@@ -37,7 +38,7 @@ trait Parallelizable[+A, +ParRepr <: Parallel] {
*/
def par: ParRepr = {
val cb = parCombiner
- for (x <- this) cb += x
+ for (x <- seq) cb += x
cb.result
}
@@ -48,3 +49,4 @@ trait Parallelizable[+A, +ParRepr <: Parallel] {
*/
protected[this] def parCombiner: Combiner[A, ParRepr]
}
+
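Since `par` now copies elements from the explicit `seq` view into the combiner, any collection mixing in Parallelizable gains a parallel counterpart through that one loop; a usage sketch (assuming the 2.9 parallel collections, e.g. Vector gaining a ParVector):

    val xs  = Vector(1, 2, 3, 4, 5)
    val pxs = xs.par                 // built via parCombiner, as in the loop above
    val ys  = pxs.map(_ * 2).seq     // back to a sequential collection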
diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala
index bbf392ab32..abcc0b5780 100644
--- a/src/library/scala/collection/Seq.scala
+++ b/src/library/scala/collection/Seq.scala
@@ -18,6 +18,7 @@ import mutable.Builder
*/
trait Seq[+A] extends PartialFunction[Int, A]
with Iterable[A]
+ with GenSeq[A]
with GenericTraversableTemplate[A, Seq]
with SeqLike[A, Seq[A]] {
override def companion: GenericCompanion[Seq] = Seq
@@ -39,10 +40,10 @@ object Seq extends SeqFactory[Seq] {
def newBuilder[A]: Builder[A, Seq[A]] = immutable.Seq.newBuilder[A]
- @deprecated("use View instead")
+ @deprecated("use View instead", "2.8.0")
type Projection[A] = SeqView[A, Coll]
- @deprecated("use Seq(value) instead")
+ @deprecated("use Seq(value) instead", "2.8.0")
def singleton[A](value: A) = Seq(value)
}
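The two deprecations above have direct replacements; a one-line sketch of each (values illustrative):

    val s = Seq(42)                  // replaces the deprecated Seq.singleton(42)
    val v = Seq(1, 2, 3).view        // a SeqView, replacing the deprecated Projection alias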
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index fe42f25dea..87649e8b03 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -58,34 +58,14 @@ import parallel.ParSeq
* and the new element type `B`.
* @define orderDependent
* @define orderDependentFold
- * @define mayNotTerminateInf
- *
- * Note: may not terminate for infinite-sized collections.
- * @define willNotTerminateInf
- *
- * Note: will not terminate for infinite-sized collections.
*/
-trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, ParSeq[A]] { self =>
+trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr] with Parallelizable[A, ParSeq[A]] { self =>
override protected[this] def thisCollection: Seq[A] = this.asInstanceOf[Seq[A]]
override protected[this] def toCollection(repr: Repr): Seq[A] = repr.asInstanceOf[Seq[A]]
- /** The length of the $coll.
- *
- * $willNotTerminateInf
- *
- * Note: `xs.length` and `xs.size` yield the same result.
- *
- * @return the number of elements in this $coll.
- */
def length: Int
- /** Selects an element by its index in the $coll.
- *
- * @param idx The index to select.
- * @return the element of this $coll at index `idx`, where `0` indicates the first element.
- * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
- */
def apply(idx: Int): A
protected[this] override def parCombiner = ParSeq.newCombiner[A]
@@ -119,25 +99,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
*/
override def size = length
- /** Tests whether this $coll contains given index.
- *
- * The implementations of methods `apply` and `isDefinedAt` turn a `Seq[A]` into
- * a `PartialFunction[Int, A]`.
- *
- * @param idx the index to test
- * @return `true` if this $coll contains an element at position `idx`, `false` otherwise.
- */
- def isDefinedAt(idx: Int): Boolean = (idx >= 0) && (idx < length)
-
- /** Computes length of longest segment whose elements all satisfy some predicate.
- *
- * $mayNotTerminateInf
- *
- * @param p the predicate used to test elements.
- * @param from the index where the search starts.
- * @return the length of the longest segment of this $coll starting from index `from`
- * such that every element of the segment satisfies the predicate `p`.
- */
def segmentLength(p: A => Boolean, from: Int): Int = {
var i = 0
var it = iterator.drop(from)
@@ -146,35 +107,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
i
}
- /** Returns the length of the longest prefix whose elements all satisfy some predicate.
- *
- * $mayNotTerminateInf
- *
- * @param p the predicate used to test elements.
- * @return the length of the longest prefix of this $coll
- * such that every element of the segment satisfies the predicate `p`.
- */
- def prefixLength(p: A => Boolean) = segmentLength(p, 0)
-
- /** Finds index of first element satisfying some predicate.
- *
- * $mayNotTerminateInf
- *
- * @param p the predicate used to test elements.
- * @return the index of the first element of this $coll that satisfies the predicate `p`,
- * or `-1`, if none exists.
- */
- def indexWhere(p: A => Boolean): Int = indexWhere(p, 0)
-
- /** Finds index of the first element satisfying some predicate after or at some start index.
- *
- * $mayNotTerminateInf
- *
- * @param p the predicate used to test elements.
- * @param from the start index
- * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`,
- * or `-1`, if none exists.
- */
def indexWhere(p: A => Boolean, from: Int): Int = {
var i = from
var it = iterator.drop(from)
@@ -188,77 +120,9 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
/** Returns index of the first element satisfying a predicate, or `-1`.
*/
- @deprecated("Use indexWhere(p) instead.")
+ @deprecated("Use indexWhere(p) instead.", "2.8.0")
def findIndexOf(p: A => Boolean): Int = indexWhere(p)
- /** Finds index of first occurrence of some value in this $coll.
- *
- * $mayNotTerminateInf
- *
- * @param elem the element value to search for.
- * @tparam B the type of the element `elem`.
- * @return the index of the first element of this $coll that is equal (wrt `==`)
- * to `elem`, or `-1`, if none exists.
- *
- * @usecase def indexOf(elem: A): Int
- */
- def indexOf[B >: A](elem: B): Int = indexOf(elem, 0)
-
- /** Finds index of first occurrence of some value in this $coll after or at some start index.
- *
- * $mayNotTerminateInf
- *
- * @param elem the element value to search for.
- * @tparam B the type of the element `elem`.
- * @param from the start index
- * @return the index `>= from` of the first element of this $coll that is equal (wrt `==`)
- * to `elem`, or `-1`, if none exists.
- *
- * @usecase def indexOf(elem: A, from: Int): Int
- */
- def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem ==, from)
-
- /** Finds index of last occurrence of some value in this $coll.
- *
- * $willNotTerminateInf
- *
- * @param elem the element value to search for.
- * @tparam B the type of the element `elem`.
- * @return the index of the last element of this $coll that is equal (wrt `==`)
- * to `elem`, or `-1`, if none exists.
- *
- * @usecase def lastIndexOf(elem: A): Int
- */
- def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem ==)
-
- /** Finds index of last occurrence of some value in this $coll before or at a given end index.
- *
- * @param elem the element value to search for.
- * @param end the end index.
- * @tparam B the type of the element `elem`.
- * @return the index `<= end` of the last element of this $coll that is equal (wrt `==`)
- * to `elem`, or `-1`, if none exists.
- *
- * @usecase def lastIndexOf(elem: A, end: Int): Int
- */
- def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem ==, end)
-
- /** Finds index of last element satisfying some predicate.
- *
- * $willNotTerminateInf
- *
- * @param p the predicate used to test elements.
- * @return the index of the last element of this $coll that satisfies the predicate `p`,
- * or `-1`, if none exists.
- */
- def lastIndexWhere(p: A => Boolean): Int = lastIndexWhere(p, length - 1)
-
- /** Finds index of last element satisfying some predicate before or at given end index.
- *
- * @param p the predicate used to test elements.
- * @return the index `<= end` of the last element of this $coll that satisfies the predicate `p`,
- * or `-1`, if none exists.
- */
def lastIndexWhere(p: A => Boolean, end: Int): Int = {
var i = length - 1
val it = reverseIterator
@@ -394,12 +258,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
}
}
- /** Returns a new $coll with elements in reversed order.
- *
- * $willNotTerminateInf
- *
- * @return A new $coll with all elements of this $coll in reversed order.
- */
def reverse: Repr = {
var xs: List[A] = List()
for (x <- this)
@@ -411,27 +269,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
b.result
}
- /**
- * Builds a new collection by applying a function to all elements of this $coll and
- * collecting the results in reversed order.
- *
- * $willNotTerminateInf
- *
- * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
- *
- * @param f the function to apply to each element.
- * @tparam B the element type of the returned collection.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` resulting from applying the given function
- * `f` to each element of this $coll and collecting the results in reversed order.
- *
- * @usecase def reverseMap[B](f: A => B): $Coll[B]
- *
- * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
- * @return a new $coll resulting from applying the given function
- * `f` to each element of this $coll and collecting the results in reversed order.
- */
def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
var xs: List[A] = List()
for (x <- this.seq)
@@ -453,21 +290,10 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
*/
def reverseIterator: Iterator[A] = toCollection(reverse).iterator
- @deprecated("use `reverseIterator' instead")
+ @deprecated("use `reverseIterator' instead", "2.8.0")
def reversedElements = reverseIterator
- /** Tests whether this $coll contains the given sequence at a given index.
- *
- * If both the receiver object, <code>this</code>, and
- * the argument, <code>that</code>, are infinite sequences
- * this method may not terminate.
- *
- * @param that the sequence to test
- * @param offset the index where the sequence is searched.
- * @return `true` if the sequence `that` is contained in this $coll at index `offset`,
- * otherwise `false`.
- */
- def startsWith[B](that: Seq[B], offset: Int): Boolean = {
+ def startsWith[B](that: GenSeq[B], offset: Int): Boolean = {
val i = this.iterator drop offset
val j = that.iterator
while (j.hasNext && i.hasNext)
@@ -477,19 +303,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
!j.hasNext
}
- /** Tests whether this $coll starts with the given sequence.
- *
- * @param that the sequence to test
- * @return `true` if this collection has `that` as a prefix, `false` otherwise.
- */
- def startsWith[B](that: Seq[B]): Boolean = startsWith(that, 0)
-
- /** Tests whether this $coll ends with the given sequence.
- * $willNotTerminateInf
- * @param that the sequence to test
- * @return `true` if this $coll has `that` as a suffix, `false` otherwise.
- */
- def endsWith[B](that: Seq[B]): Boolean = {
+ def endsWith[B](that: GenSeq[B]): Boolean = {
val i = this.iterator.drop(length - that.length)
val j = that.iterator
while (i.hasNext && j.hasNext)
@@ -505,7 +319,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return the first index such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` if no such subsequence exists.
*/
- def indexOfSlice[B >: A](that: Seq[B]): Int = indexOfSlice(that, 0)
+ def indexOfSlice[B >: A](that: GenSeq[B]): Int = indexOfSlice(that, 0)
/** Finds first index after or at a start index where this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
@@ -514,9 +328,9 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return the first index `>= from` such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` if no such subsequence exists.
*/
- def indexOfSlice[B >: A](that: Seq[B], from: Int): Int =
+ def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int =
if (this.hasDefiniteSize && that.hasDefiniteSize)
- SeqLike.indexOf(thisCollection, 0, length, that, 0, that.length, from)
+ SeqLike.indexOf(thisCollection, 0, length, that.seq, 0, that.length, from)
else {
var i = from
var s: Seq[A] = thisCollection drop i
@@ -536,7 +350,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return the last index such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` if no such subsequence exists.
*/
- def lastIndexOfSlice[B >: A](that: Seq[B]): Int = lastIndexOfSlice(that, length)
+ def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = lastIndexOfSlice(that, length)
/** Finds last index before or at a given end index where this $coll contains a given sequence as a slice.
* @param that the sequence to test
@@ -544,8 +358,8 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return the last index `<= end` such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` if no such subsequence exists.
*/
- def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int =
- SeqLike.lastIndexOf(thisCollection, 0, length, that, 0, that.length, end)
+ def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int =
+ SeqLike.lastIndexOf(thisCollection, 0, length, that.seq, 0, that.length, end)
/** Tests whether this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
@@ -553,7 +367,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return `true` if this $coll contains a slice with the same elements
* as `that`, otherwise `false`.
*/
- def containsSlice[B](that: Seq[B]): Boolean = indexOfSlice(that) != -1
+ def containsSlice[B](that: GenSeq[B]): Boolean = indexOfSlice(that) != -1
/** Tests whether this $coll contains a given value as an element.
* $mayNotTerminateInf
@@ -584,7 +398,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return a new $coll which contains all elements of this $coll
* followed by all elements of `that`.
*/
- def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
+ override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
this ++ that
/** Computes the multiset difference between this $coll and another sequence.
@@ -606,10 +420,10 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
* part of the result, but any following occurrences will.
*/
- def diff[B >: A](that: Seq[B]): Repr = {
- val occ = occCounts(that)
+ def diff[B >: A](that: GenSeq[B]): Repr = {
+ val occ = occCounts(that.seq)
val b = newBuilder
- for (x <- this.seq)
+ for (x <- this)
if (occ(x) == 0) b += x
else occ(x) -= 1
b.result
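The multiset semantics described in the comment above are easiest to see on a sequence with repeated elements (values illustrative):

    Seq(1, 1, 2, 3) diff Seq(1, 2)      // Seq(1, 3): only the first occurrence of 1 is removed
    Seq(1, 1, 2, 3) diff Seq(1, 1, 1)   // Seq(2, 3): surplus occurrences in `that` are ignored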
@@ -634,10 +448,10 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
*/
- def intersect[B >: A](that: Seq[B]): Repr = {
- val occ = occCounts(that)
+ def intersect[B >: A](that: GenSeq[B]): Repr = {
+ val occ = occCounts(that.seq)
val b = newBuilder
- for (x <- this.seq)
+ for (x <- this)
if (occ(x) > 0) {
b += x
occ(x) -= 1
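The corresponding multiset behaviour of intersect, again on values chosen for illustration:

    Seq(1, 1, 2, 3) intersect Seq(1, 1, 1, 2)   // Seq(1, 1, 2): keeps at most as many 1s as both sides have
    Seq(1, 2) intersect Seq(3)                  // empty: nothing is shared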
@@ -659,7 +473,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
def distinct: Repr = {
val b = newBuilder
val seen = mutable.HashSet[A]()
- for (x <- this.seq) {
+ for (x <- this) {
if (!seen(x)) {
b += x
seen += x
@@ -668,41 +482,15 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
b.result
}
- /** Produces a new $coll where a slice of elements in this $coll is replaced by another sequence.
- *
- * @param from the index of the first replaced element
- * @param patch the replacement sequence
- * @param replaced the number of elements to drop in the original $coll
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new $coll consisting of all elements of this $coll
- * except that `replaced` elements starting from `from` are replaced
- * by `patch`.
- * @usecase def patch(from: Int, that: Seq[A], replaced: Int): $Coll[A]
- * @return a new $coll consisting of all elements of this $coll
- * except that `replaced` elements starting from `from` are replaced
- * by `patch`.
- */
- def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
val (prefix, rest) = this.splitAt(from)
b ++= toCollection(prefix)
- b ++= patch
+ b ++= patch.seq
b ++= toCollection(rest).view drop replaced
b.result
}
- /** A copy of this $coll with one single replaced element.
- * @param index the position of the replacement
- * @param elem the replacing element
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`.
- * @usecase def updated(index: Int, elem: A): $Coll[A]
- * @return a copy of this $coll with the element at position `index` replaced by `elem`.
- */
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
val (prefix, rest) = this.splitAt(index)
@@ -712,17 +500,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
b.result
}
- /** Prepends an element to this $coll
- * @param elem the prepended element
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` consisting of `elem` followed
- * by all elements of this $coll.
- * @usecase def +:(elem: A): $Coll[A]
- * @return a new $coll consisting of `elem` followed
- * by all elements of this $coll.
- */
def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b += elem
@@ -730,18 +507,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
b.result
}
- /** Appends an element to this $coll
- * $willNotTerminateInf
- * @param elem the appended element
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` consisting of
- * all elements of this $coll followed by `elem`.
- * @usecase def :+(elem: A): $Coll[A]
- * @return a new $coll consisting of
- * all elements of this $coll followed by `elem`.
- */
def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b ++= thisCollection
@@ -749,20 +514,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
b.result
}
- /** Appends an element value to this $coll until a given target length is reached.
- * @param len the target length
- * @param elem the padding value
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` consisting of
- * all elements of this $coll followed by the minimal number of occurrences of `elem` so
- * that the resulting collection has a length of at least `len`.
- * @usecase def padTo(len: Int, elem: A): $Coll[A]
- * @return a new $coll consisting of
- * all elements of this $coll followed by the minimal number of occurrences of `elem` so
- * that the resulting $coll has a length of at least `len`.
- */
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b.sizeHint(length max len)
@@ -775,17 +526,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
b.result
}
- /** Tests whether every element of this $coll relates to the
- * corresponding element of another sequence by satisfying a test predicate.
- *
- * @param that the other sequence
- * @param p the test predicate, which relates elements from both sequences
- * @tparam B the type of the elements of `that`
- * @return `true` if both sequences have the same length and
- * `p(x, y)` is `true` for all corresponding elements `x` of this $coll
- * and `y` of `that`, otherwise `false`.
- */
- def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = {
+ def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = {
val i = this.iterator
val j = that.iterator
while (i.hasNext && j.hasNext)
@@ -883,33 +624,13 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
override def view(from: Int, until: Int) = view.slice(from, until)
- /** Hashcodes for $Coll produce a value from the hashcodes of all the
- * elements of the $coll.
- */
- override def hashCode() = {
- val h = new util.MurmurHash[A](Seq.hashSeed)
- this.foreach(h)
- h.hash
- }
-
- /** The equals method for arbitrary sequences. Compares this sequence to
- * some other object.
- * @param that The object to compare the sequence to
- * @return `true` if `that` is a sequence that has the same elements as
- * this sequence in the same order, `false` otherwise
- */
- override def equals(that: Any): Boolean = that match {
- case that: Seq[_] => (that canEqual this) && (this sameElements that)
- case _ => false
- }
-
/* Need to override string, so that it's not the Function1's string that gets mixed in.
*/
override def toString = super[IterableLike].toString
/** Returns index of the last element satisfying a predicate, or -1.
*/
- @deprecated("use `lastIndexWhere` instead")
+ @deprecated("use `lastIndexWhere` instead", "2.8.0")
def findLastIndexOf(p: A => Boolean): Int = lastIndexWhere(p)
/** Tests whether every element of this $coll relates to the
@@ -922,7 +643,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* `p(x, y)` is `true` for all corresponding elements `x` of this $coll
* and `y` of `that`, otherwise `false`.
*/
- @deprecated("use `corresponds` instead")
+ @deprecated("use `corresponds` instead", "2.8.0")
def equalsWith[B](that: Seq[B])(f: (A,B) => Boolean): Boolean = corresponds(that)(f)
/**
@@ -930,7 +651,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* <code>map</code>, and <code>flatMap</code> methods that build projections
* of the collection.
*/
- @deprecated("use `view' instead")
+ @deprecated("use `view' instead", "2.8.0")
override def projection = view
}
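The deprecations consolidated above all have one-to-one replacements; a short sketch (values illustrative):

    val xs = Seq(3, 1, 4, 1, 5)
    xs.indexWhere(_ > 3)          // 2;   replaces the deprecated findIndexOf
    xs.lastIndexWhere(_ == 1)     // 3;   replaces the deprecated findLastIndexOf
    xs.corresponds(Seq(6, 2, 8, 2, 10))((a, b) => a * 2 == b)   // true; replaces equalsWith
    xs.view                       // replaces projection
    xs.reverseIterator            // replaces reversedElements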
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index e3c6579cdd..565bd9ff5e 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -43,25 +43,25 @@ trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A,
override def reverse: Repr = self.reverse
override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.reverseMap(f)(bf)
override def reverseIterator: Iterator[A] = self.reverseIterator
- override def startsWith[B](that: Seq[B], offset: Int): Boolean = self.startsWith(that, offset)
- override def startsWith[B](that: Seq[B]): Boolean = self.startsWith(that)
- override def endsWith[B](that: Seq[B]): Boolean = self.endsWith(that)
- override def indexOfSlice[B >: A](that: Seq[B]): Int = self.indexOfSlice(that)
- override def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = self.indexOfSlice(that)
- override def lastIndexOfSlice[B >: A](that: Seq[B]): Int = self.lastIndexOfSlice(that)
- override def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = self.lastIndexOfSlice(that, end)
- override def containsSlice[B](that: Seq[B]): Boolean = self.indexOfSlice(that) != -1
+ override def startsWith[B](that: GenSeq[B], offset: Int): Boolean = self.startsWith(that, offset)
+ override def startsWith[B](that: GenSeq[B]): Boolean = self.startsWith(that)
+ override def endsWith[B](that: GenSeq[B]): Boolean = self.endsWith(that)
+ override def indexOfSlice[B >: A](that: GenSeq[B]): Int = self.indexOfSlice(that)
+ override def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = self.indexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = self.lastIndexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = self.lastIndexOfSlice(that, end)
+ override def containsSlice[B](that: GenSeq[B]): Boolean = self.indexOfSlice(that) != -1
override def contains(elem: Any): Boolean = self.contains(elem)
- override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf)
- override def diff[B >: A](that: Seq[B]): Repr = self.diff(that)
- override def intersect[B >: A](that: Seq[B]): Repr = self.intersect(that)
+ override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf)
+ override def diff[B >: A](that: GenSeq[B]): Repr = self.diff(that)
+ override def intersect[B >: A](that: GenSeq[B]): Repr = self.intersect(that)
override def distinct: Repr = self.distinct
- override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.patch(from, patch, replaced)(bf)
+ override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.patch(from, patch, replaced)(bf)
override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.updated(index, elem)(bf)
override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.+:(elem)(bf)
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.:+(elem)(bf)
override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.padTo(len, elem)(bf)
- override def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = self.corresponds(that)(p)
+ override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = self.corresponds(that)(p)
override def sortWith(lt: (A, A) => Boolean): Repr = self.sortWith(lt)
override def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = self.sortBy(f)(ord)
override def sorted[B >: A](implicit ord: Ordering[B]): Repr = self.sorted(ord)
diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala
index d3a1cb040d..9f936e58aa 100644
--- a/src/library/scala/collection/SeqView.scala
+++ b/src/library/scala/collection/SeqView.scala
@@ -16,7 +16,7 @@ import TraversableView.NoBuilder
/** A base trait for non-strict views of sequences.
* $seqViewInfo
*/
-trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]]
+trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]] with GenSeqView[A, Coll]
/** An object containing the necessary implicit definitions to make
* `SeqView`s work. Its definitions are generally not accessed directly by clients.
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 2bd8e29b65..f79baa7c58 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -31,170 +31,62 @@ import TraversableView.NoBuilder
trait SeqViewLike[+A,
+Coll,
+This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
- extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This]
+ extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This] with GenSeqViewLike[A, Coll, This]
{ self =>
- trait Transformed[+B] extends SeqView[B, Coll] with super.Transformed[B] {
+ trait Transformed[+B] extends SeqView[B, Coll] with super[IterableViewLike].Transformed[B] with super[GenSeqViewLike].Transformed[B] {
def length: Int
def apply(idx: Int): B
override def toString = viewToString
}
- trait EmptyView extends Transformed[Nothing] with super.EmptyView {
- final override def length = 0
- final override def apply(n: Int) = Nil(n)
- }
+ trait EmptyView extends Transformed[Nothing] with super[IterableViewLike].EmptyView with super[GenSeqViewLike].EmptyView
- trait Forced[B] extends super.Forced[B] with Transformed[B] {
- def length = forced.length
- def apply(idx: Int) = forced.apply(idx)
- }
+ trait Forced[B] extends super[IterableViewLike].Forced[B] with super[GenSeqViewLike].Forced[B] with Transformed[B]
- trait Sliced extends super.Sliced with Transformed[A] {
- def length = iterator.size
- def apply(idx: Int): A =
- if (idx + from < until) self.apply(idx + from)
- else throw new IndexOutOfBoundsException(idx.toString)
+ trait Sliced extends super[IterableViewLike].Sliced with super[GenSeqViewLike].Sliced with Transformed[A]
- override def foreach[U](f: A => U) = iterator foreach f
- override def iterator: Iterator[A] = self.iterator drop from take endpoints.width
- }
+ trait Mapped[B] extends super[IterableViewLike].Mapped[B] with super[GenSeqViewLike].Mapped[B] with Transformed[B]
- trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
- def length = self.length
- def apply(idx: Int): B = mapping(self(idx))
- }
+ trait FlatMapped[B] extends super[IterableViewLike].FlatMapped[B] with super[GenSeqViewLike].FlatMapped[B] with Transformed[B]
- trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
- protected[this] lazy val index = {
- val index = new Array[Int](self.length + 1)
- index(0) = 0
- for (i <- 0 until self.length)
- index(i + 1) = index(i) + mapping(self(i)).size
- index
- }
- protected[this] def findRow(idx: Int, lo: Int, hi: Int): Int = {
- val mid = (lo + hi) / 2
- if (idx < index(mid)) findRow(idx, lo, mid - 1)
- else if (idx >= index(mid + 1)) findRow(idx, mid + 1, hi)
- else mid
- }
- def length = index(self.length)
- def apply(idx: Int) = {
- val row = findRow(idx, 0, self.length - 1)
- mapping(self(row)).toSeq(idx - index(row))
- }
- }
+ trait Appended[B >: A] extends super[IterableViewLike].Appended[B] with super[GenSeqViewLike].Appended[B] with Transformed[B]
- trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
- protected[this] lazy val restSeq = rest.toSeq
- def length = self.length + restSeq.length
- def apply(idx: Int) =
- if (idx < self.length) self(idx) else restSeq(idx - self.length)
- }
+ trait Filtered extends super[IterableViewLike].Filtered with super[GenSeqViewLike].Filtered with Transformed[A]
- trait Filtered extends super.Filtered with Transformed[A] {
- protected[this] lazy val index = {
- var len = 0
- val arr = new Array[Int](self.length)
- for (i <- 0 until self.length)
- if (pred(self(i))) {
- arr(len) = i
- len += 1
- }
- arr take len
- }
- def length = index.length
- def apply(idx: Int) = self(index(idx))
- }
+ trait TakenWhile extends super[IterableViewLike].TakenWhile with super[GenSeqViewLike].TakenWhile with Transformed[A]
- trait TakenWhile extends super.TakenWhile with Transformed[A] {
- protected[this] lazy val len = self prefixLength pred
- def length = len
- def apply(idx: Int) =
- if (idx < len) self(idx)
- else throw new IndexOutOfBoundsException(idx.toString)
- }
+ trait DroppedWhile extends super[IterableViewLike].DroppedWhile with super[GenSeqViewLike].DroppedWhile with Transformed[A]
- trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
- protected[this] lazy val start = self prefixLength pred
- def length = self.length - start
- def apply(idx: Int) =
- if (idx >= 0) self(idx + start)
- else throw new IndexOutOfBoundsException(idx.toString)
- }
+ trait Zipped[B] extends super[IterableViewLike].Zipped[B] with super[GenSeqViewLike].Zipped[B] with Transformed[(A, B)]
- trait Zipped[B] extends super.Zipped[B] with Transformed[(A, B)] {
- protected[this] lazy val thatSeq = other.toSeq
- /* Have to be careful here - other may be an infinite sequence. */
- def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length
- def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx))
- }
+ trait ZippedAll[A1 >: A, B] extends super[IterableViewLike].ZippedAll[A1, B] with super[GenSeqViewLike].ZippedAll[A1, B] with Transformed[(A1, B)]
- trait ZippedAll[A1 >: A, B] extends super.ZippedAll[A1, B] with Transformed[(A1, B)] {
- protected[this] lazy val thatSeq = other.toSeq
- def length: Int = self.length max thatSeq.length
- def apply(idx: Int) =
- (if (idx < self.length) self.apply(idx) else thisElem,
- if (idx < thatSeq.length) thatSeq.apply(idx) else thatElem)
- }
+ trait Reversed extends Transformed[A] with super[GenSeqViewLike].Reversed
- trait Reversed extends Transformed[A] {
- override def iterator: Iterator[A] = createReversedIterator
- def length: Int = self.length
- def apply(idx: Int): A = self.apply(length - 1 - idx)
- final override protected[this] def viewIdentifier = "R"
-
- private def createReversedIterator = {
- var lst = List[A]()
- for (elem <- self) lst ::= elem
- lst.iterator
- }
- }
+ trait Patched[B >: A] extends Transformed[B] with super[GenSeqViewLike].Patched[B]
- trait Patched[B >: A] extends Transformed[B] {
- protected[this] val from: Int
- protected[this] val patch: Seq[B]
- protected[this] val replaced: Int
- private lazy val plen = patch.length
- override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced)
- def length: Int = self.length + plen - replaced
- def apply(idx: Int): B =
- if (idx < from) self.apply(idx)
- else if (idx < from + plen) patch.apply(idx - from)
- else self.apply(idx - plen + replaced)
- final override protected[this] def viewIdentifier = "P"
- }
-
- trait Prepended[B >: A] extends Transformed[B] {
- protected[this] val fst: B
- override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
- def length: Int = 1 + self.length
- def apply(idx: Int): B =
- if (idx == 0) fst
- else self.apply(idx - 1)
- final override protected[this] def viewIdentifier = "A"
- }
+ trait Prepended[B >: A] extends Transformed[B] with super[GenSeqViewLike].Prepended[B]
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
- protected override def newForced[B](xs: => Seq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
+ protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
+ protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected override def newFlatMapped[B](f: A => TraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
- protected override def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
- protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
+ protected override def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
+ protected override def newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
val other = that
val thisElem = _thisElem
val thatElem = _thatElem
} with ZippedAll[A1, B]
protected def newReversed: Transformed[A] = new Reversed { }
- protected def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] = new {
+ protected def newPatched[B >: A](_from: Int, _patch: GenSeq[B], _replaced: Int): Transformed[B] = new {
val from = _from
val patch = _patch
val replaced = _replaced
@@ -203,7 +95,7 @@ trait SeqViewLike[+A,
override def reverse: This = newReversed.asInstanceOf[This]
- override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = {
+ override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = {
newPatched(from, patch, replaced).asInstanceOf[That]
// was: val b = bf(repr)
// if (b.isInstanceOf[NoBuilder[_]]) newPatched(from, patch, replaced).asInstanceOf[That]
@@ -227,13 +119,13 @@ trait SeqViewLike[+A,
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
++(Iterator.single(elem))(bf)
- override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[This, B, That]): That =
newForced(thisSeq union that).asInstanceOf[That]
- override def diff[B >: A](that: Seq[B]): This =
+ override def diff[B >: A](that: GenSeq[B]): This =
newForced(thisSeq diff that).asInstanceOf[This]
- override def intersect[B >: A](that: Seq[B]): This =
+ override def intersect[B >: A](that: GenSeq[B]): This =
newForced(thisSeq intersect that).asInstanceOf[This]
override def sorted[B >: A](implicit ord: Ordering[B]): This =
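The transformer traits above keep the view lazy; operations such as union, diff and intersect only materialise through newForced when a result is demanded. A minimal sketch of that behaviour (assuming the standard view/force pair):

    val v = Seq(1, 2, 3).view map (_ * 10)   // still a SeqView; nothing computed yet
    v.force                                  // Seq(10, 20, 30): evaluation happens here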
diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala
index f89df50f26..7241d7fb3a 100644
--- a/src/library/scala/collection/Set.scala
+++ b/src/library/scala/collection/Set.scala
@@ -14,16 +14,16 @@ import generic._
/** A base trait for all sets, mutable as well as immutable.
*
* $setNote
- * $setNote2
+ * '''Implementation note:''' If your additions and mutations return the same kind of set as the set
+ * you are defining, you should inherit from `SetLike` as well.
* $setTags
+ *
* @since 1.0
* @author Matthias Zenger
- * @define setNote2
- * '''Implementation note:''' If your additions and mutations return the same kind of set as the set
- * you are defining, you should inherit from `SetLike` as well.
*/
trait Set[A] extends (A => Boolean)
with Iterable[A]
+ with GenSet[A]
with GenericSetTemplate[A, Set]
with SetLike[A, Set[A]] {
override def companion: GenericCompanion[Set] = Set
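The implementation note folded into the comment above ("inherit from SetLike as well") is aimed at implementers; a hypothetical minimal sketch of a set that keeps its own type under + and - (all names here are illustrative, not part of the library):

    import scala.collection.{ Set, SetLike }

    class TinySet[A](elems: scala.collection.immutable.Set[A])
        extends Set[A] with SetLike[A, TinySet[A]] {
      override def empty = new TinySet(scala.collection.immutable.Set.empty[A])
      def contains(a: A) = elems contains a
      def +(a: A) = new TinySet(elems + a)     // stays a TinySet, as SetLike requires
      def -(a: A) = new TinySet(elems - a)
      def iterator = elems.iterator
    }

    val t = new TinySet(scala.collection.immutable.Set.empty[Int]) + 1 + 2   // still a TinySet[Int]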
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 7cbb8ee2ef..c5b97c135d 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -58,8 +58,10 @@ import parallel.ParSet
*/
trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
extends IterableLike[A, This]
+ with GenSetLike[A, This]
with Subtractable[A, This]
- with Parallelizable[A, ParSet[A]] {
+ with Parallelizable[A, ParSet[A]]
+{
self =>
/** The empty set of the same type as this set
@@ -123,7 +125,7 @@ self =>
* @param elems the collection containing the added elements.
* @return a new $coll with the given elements added.
*/
- def ++ (elems: TraversableOnce[A]): This = newBuilder ++= this ++= elems result
+ def ++ (elems: GenTraversableOnce[A]): This = newBuilder ++= seq ++= elems.seq result
/** Creates a new set with a given element removed from this set.
*
@@ -139,39 +141,14 @@ self =>
*/
override def isEmpty: Boolean = size == 0
- /** Tests if some element is contained in this set.
- *
- * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
- * @param elem the element to test for membership.
- * @return `true` if `elem` is contained in this set, `false` otherwise.
- */
- def apply(elem: A): Boolean = this contains elem
-
- /** Computes the intersection between this set and another set.
- *
- * @param that the set to intersect with.
- * @return a new set consisting of all elements that are both in this
- * set and in the given set `that`.
- */
- def intersect(that: Set[A]): This = this filter that
-
- /** Computes the intersection between this set and another set.
- *
- * '''Note:''' Same as `intersect`.
- * @param that the set to intersect with.
- * @return a new set consisting of all elements that are both in this
- * set and in the given set `that`.
- */
- def &(that: Set[A]): This = this intersect that
-
- /** This method is an alias for `intersect`.
+ /** This method is an alias for `intersect`.
* It computes an intersection with set `that`.
* It removes all the elements that are not present in `that`.
*
* @param that the set to intersect with
*/
- @deprecated("use & instead")
- def ** (that: Set[A]): This = &(that)
+ @deprecated("use & instead", "2.8.0")
+ def ** (that: GenSet[A]): This = &(that)
/** Computes the union between this set and another set.
*
@@ -179,16 +156,7 @@ self =>
* @return a new set consisting of all elements that are in this
* set or in the given set `that`.
*/
- def union(that: Set[A]): This = this ++ that
-
- /** Computes the union between this set and another set.
- *
- * '''Note:''' Same as `union`.
- * @param that the set to form the union with.
- * @return a new set consisting of all elements that are in this
- * set or in the given set `that`.
- */
- def | (that: Set[A]): This = this union that
+ def union(that: GenSet[A]): This = this ++ that
/** Computes the difference of this set and another set.
*
@@ -196,24 +164,7 @@ self =>
* @return a set containing those elements of this
* set that are not also contained in the given set `that`.
*/
- def diff(that: Set[A]): This = this -- that
-
- /** The difference of this set and another set.
- *
- * '''Note:''' Same as `diff`.
- * @param that the set of elements to exclude.
- * @return a set containing those elements of this
- * set that are not also contained in the given set `that`.
- */
- def &~(that: Set[A]): This = this diff that
-
- /** Tests whether this set is a subset of another set.
- *
- * @param that the set to test.
- * @return `true` if this set is a subset of `that`, i.e. if
- * every element of this set is also an element of `that`.
- */
- def subsetOf(that: Set[A]) = this forall that
+ def diff(that: GenSet[A]): This = this -- that
/** An iterator over all subsets of this set of the given size.
* If the requested size is impossible, an empty iterator is returned.
@@ -290,31 +241,4 @@ self =>
override def stringPrefix: String = "Set"
override def toString = super[IterableLike].toString
- // Careful! Don't write a Set's hashCode like:
- // override def hashCode() = this map (_.hashCode) sum
- // Calling map on a set drops duplicates: any hashcode collisions would
- // then be dropped before they can be added.
- // Hash should be symmetric in set entries, but without trivial collisions.
- override def hashCode() = util.MurmurHash.symmetricHash(this,Set.hashSeed)
-
- /** Compares this set with another object for equality.
- *
- * '''Note:''' This operation contains an unchecked cast: if `that`
- * is a set, it will assume with an unchecked cast
- * that it has the same element type as this set.
- * Any subsequent ClassCastException is treated as a `false` result.
- * @param that the other object
- * @return `true` if `that` is a set which contains the same elements
- * as this set.
- */
- override def equals(that: Any): Boolean = that match {
- case that: Set[_] =>
- (this eq that) ||
- (that canEqual this) &&
- (this.size == that.size) &&
- (try this subsetOf that.asInstanceOf[Set[A]]
- catch { case ex: ClassCastException => false })
- case _ =>
- false
- }
}
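The SetLike changes above widen the binary set operations from Set[A] to GenSet[A] (and ++ to GenTraversableOnce[A]), so a sequential set can be combined directly with a parallel one, and ** now carries a deprecation version pointing users to &. A minimal sketch of the resulting usage, not part of the patch, assuming a 2.9-style standard library:

object SetLikeSketch extends App {
  val s = Set(1, 2, 3, 4)
  val p = Set(3, 4, 5).par     // a parallel GenSet[Int]
  println(s union p)           // a set containing 1, 2, 3, 4, 5: union now accepts a GenSet
  println(s intersect p)       // Set(3, 4)
  println(s & p)               // same as intersect; use this instead of the deprecated **
  println(s diff p)            // Set(1, 2)
  println(s ++ p)              // same elements as the union: ++ accepts any GenTraversableOnce
}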
diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala
index 9308de45b0..ab31bf32b9 100644
--- a/src/library/scala/collection/SetProxyLike.scala
+++ b/src/library/scala/collection/SetProxyLike.scala
@@ -26,11 +26,11 @@ trait SetProxyLike[A, +This <: SetLike[A, This] with Set[A]] extends SetLike[A,
override def - (elem: A) = self.-(elem)
override def isEmpty: Boolean = self.isEmpty
override def apply(elem: A): Boolean = self.apply(elem)
- override def intersect(that: Set[A]) = self.intersect(that)
- override def &(that: Set[A]): This = self.&(that)
- override def union(that: Set[A]): This = self.union(that)
- override def | (that: Set[A]): This = self.|(that)
- override def diff(that: Set[A]): This = self.diff(that)
- override def &~(that: Set[A]): This = self.&~(that)
- override def subsetOf(that: Set[A]): Boolean = self.subsetOf(that)
+ override def intersect(that: GenSet[A]) = self.intersect(that)
+ override def &(that: GenSet[A]): This = self.&(that)
+ override def union(that: GenSet[A]): This = self.union(that)
+ override def | (that: GenSet[A]): This = self.|(that)
+ override def diff(that: GenSet[A]): This = self.diff(that)
+ override def &~(that: GenSet[A]): This = self.&~(that)
+ override def subsetOf(that: GenSet[A]): Boolean = self.subsetOf(that)
}
diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala
index 04e2a87c26..75541407dd 100644
--- a/src/library/scala/collection/SortedSetLike.scala
+++ b/src/library/scala/collection/SortedSetLike.scala
@@ -33,7 +33,7 @@ self =>
override def until(until: A): This = rangeImpl(None, Some(until))
override def range(from: A, until: A): This = rangeImpl(Some(from), Some(until))
- override def subsetOf(that: Set[A]): Boolean = that match {
+ override def subsetOf(that: GenSet[A]): Boolean = that match {
// TODO: It may actually be pretty rare that the guard here ever
// passes. Is this really worth keeping? If it is, we should add
// more sensible implementations of == to Ordering.
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index 25aba13b0f..500c340368 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -15,20 +15,26 @@ import mutable.{Builder, Buffer, ArrayBuffer, ListBuffer}
import scala.util.control.Breaks
/** A trait for traversable collections.
+ * All operations are guaranteed to be performed in a single-threaded manner.
+ *
* $traversableInfo
*/
trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
+ with GenTraversable[A]
+ with TraversableOnce[A]
with GenericTraversableTemplate[A, Traversable] {
- def companion: GenericCompanion[Traversable] = Traversable
+ override def companion: GenericCompanion[Traversable] = Traversable
+
+ override def seq: Traversable[A] = this
/* The following methods are inherited from TraversableLike
*
override def isEmpty: Boolean
override def size: Int
override def hasDefiniteSize
- override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
- override def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
+ override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def filter(p: A => Boolean): Traversable[A]
override def remove(p: A => Boolean): Traversable[A]
override def partition(p: A => Boolean): (Traversable[A], Traversable[A])
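Traversable now mixes in GenTraversable and TraversableOnce and pins seq to return the collection itself, which is what lets generic code cross between the parallel and sequential hierarchies. A minimal sketch, not part of the patch, assuming a 2.9-style standard library:

object TraversableSeqSketch extends App {
  val xs: Traversable[Int] = List(1, 2, 3)
  println(xs.seq eq xs)                           // true: a sequential Traversable is its own seq
  val g: collection.GenTraversable[Int] = xs.par
  println(g.seq.toList)                           // List(1, 2, 3): back on the sequential side
}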
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 950f9e65a7..2d3a4c229c 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala.collection
import generic._
@@ -17,8 +15,8 @@ import annotation.migration
import annotation.unchecked.{ uncheckedVariance => uV }
import parallel.ParIterable
-
/** A template trait for traversable collections of type `Traversable[A]`.
+ *
* $traversableInfo
* @define mutability
* @define traversableInfo
@@ -67,32 +65,13 @@ import parallel.ParIterable
*
* @define Coll Traversable
* @define coll traversable collection
- * @define thatinfo the class of the returned collection. Where possible, `That` is
- * the same class as the current collection class `Repr`, but this
- * depends on the element type `B` being admissible for that class,
- * which means that an implicit instance of type `CanBuildFrom[Repr, B, That]`
- * is found.
- * @define bfinfo an implicit value of class `CanBuildFrom` which determines
- * the result class `That` from the current representation type `Repr` and
- * the new element type `B`.
- * @define orderDependent
- *
- * Note: might return different results for different runs, unless the underlying collection type is ordered.
- * @define orderDependentFold
- *
- * Note: might return different results for different runs, unless the underlying collection type is ordered,
- * or the operator is associative and commutative.
- * @define mayNotTerminateInf
- *
- * Note: may not terminate for infinite-sized collections.
- * @define willNotTerminateInf
- *
- * Note: will not terminate for infinite-sized collections.
*/
trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
with FilterMonadic[A, Repr]
with TraversableOnce[A]
- with Parallelizable[A, ParIterable[A]] {
+ with GenTraversableLike[A, Repr]
+ with Parallelizable[A, ParIterable[A]]
+{
self =>
import Traversable.breaks._
@@ -155,11 +134,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
result
}
- /** Tests whether this $coll can be repeatedly traversed.
- * @return `true`
- */
- final def isTraversableAgain = true
-
/** Tests whether this $coll is known to have a finite size.
* All strict collections are known to have finite size. For a non-strict collection
* such as `Stream`, the predicate returns `true` if all elements have been computed.
@@ -171,25 +145,11 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
*/
def hasDefiniteSize = true
- /** Concatenates this $coll with the elements of a traversable collection.
- *
- * @param that the traversable to append.
- * @tparam B the element type of the returned collection.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` which contains all elements
- * of this $coll followed by all elements of `that`.
- *
- * @usecase def ++[B](that: TraversableOnce[B]): $Coll[B]
- *
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
- */
- def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
- if (that.isInstanceOf[IndexedSeqLike[_, _]]) b.sizeHint(this, that.size)
+ if (that.isInstanceOf[IndexedSeqLike[_, _]]) b.sizeHint(this, that.seq.size)
b ++= thisCollection
- b ++= that
+ b ++= that.seq
b.result
}
@@ -223,22 +183,8 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* but Traversable and down can use the overload.
*/
def ++:[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
- (that ++ this)(breakOut)
+ (that ++ seq)(breakOut)
- /** Builds a new collection by applying a function to all elements of this $coll.
- *
- * @param f the function to apply to each element.
- * @tparam B the element type of the returned collection.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` resulting from applying the given function
- * `f` to each element of this $coll and collecting the results.
- *
- * @usecase def map[B](f: A => B): $Coll[B]
- *
- * @return a new $coll resulting from applying the given function
- * `f` to each element of this $coll and collecting the results.
- */
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b.sizeHint(this)
@@ -246,24 +192,9 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
b.result
}
- /** Builds a new collection by applying a function to all elements of this $coll
- * and concatenating the results.
- *
- * @param f the function to apply to each element.
- * @tparam B the element type of the returned collection.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` resulting from applying the given collection-valued function
- * `f` to each element of this $coll and concatenating the results.
- *
- * @usecase def flatMap[B](f: A => TraversableOnce[B]): $Coll[B]
- *
- * @return a new $coll resulting from applying the given collection-valued function
- * `f` to each element of this $coll and concatenating the results.
- */
- def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
- for (x <- this) b ++= f(x)
+ for (x <- this) b ++= f(x).seq
b.result
}
@@ -288,23 +219,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
*/
def filterNot(p: A => Boolean): Repr = filter(!p(_))
- /** Builds a new collection by applying a partial function to all elements of this $coll
- * on which the function is defined.
- *
- * @param pf the partial function which filters and maps the $coll.
- * @tparam B the element type of the returned collection.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` resulting from applying the partial function
- * `pf` to each element on which it is defined and collecting the results.
- * The order of the elements is preserved.
- *
- * @usecase def collect[B](pf: PartialFunction[A, B]): $Coll[B]
- *
- * @return a new $coll resulting from applying the given partial function
- * `pf` to each element on which it is defined and collecting the results.
- * The order of the elements is preserved.
- */
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- this) if (pf.isDefinedAt(x)) b += pf(x)
@@ -353,22 +267,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
(l.result, r.result)
}
- /** Partitions this $coll into a map of ${coll}s according to some discriminator function.
- *
- * Note: this method is not re-implemented by views. This means
- * when applied to a view it will always force the view and
- * return a new $coll.
- *
- * @param f the discriminator function.
- * @tparam K the type of keys returned by the discriminator function.
- * @return A map from keys to ${coll}s such that the following invariant holds:
- * {{{
- * (xs partition f)(k) = xs filter (x => f(x) == k)
- * }}}
- * That is, every key `k` is bound to a $coll of those elements `x`
- * for which `f(x)` equals `k`.
- *
- */
def groupBy[K](f: A => K): immutable.Map[K, Repr] = {
val m = mutable.Map.empty[K, Builder[A, Repr]]
for (elem <- this) {
@@ -435,19 +333,8 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
result
}
- /** Produces a collection containing cumulative results of applying the
- * operator going left to right.
- *
- * $willNotTerminateInf
- * $orderDependent
- *
- * @tparam B the type of the elements in the resulting collection
- * @tparam That the actual type of the resulting collection
- * @param z the initial value
- * @param op the binary operator applied to the intermediate result and the element
- * @param bf $bfinfo
- * @return collection with intermediate results
- */
+ def scan[B >: A, That](z: B)(op: (B, B) => B)(implicit cbf: CanBuildFrom[Repr, B, That]): That = scanLeft(z)(op)
+
def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b.sizeHint(this, 1)
@@ -457,23 +344,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
b.result
}
- /** Produces a collection containing cumulative results of applying the operator going right to left.
- * The head of the collection is the last cumulative result.
- * $willNotTerminateInf
- * $orderDependent
- *
- * Example:
- * {{{
- * List(1, 2, 3, 4).scanRight(0)(_ + _) == List(10, 9, 7, 4, 0)
- * }}}
- *
- * @tparam B the type of the elements in the resulting collection
- * @tparam That the actual type of the resulting collection
- * @param z the initial value
- * @param op the binary operator applied to the intermediate result and the element
- * @param bf $bfinfo
- * @return collection with intermediate results
- */
@migration(2, 9,
"This scanRight definition has changed in 2.9.\n" +
"The previous behavior can be reproduced with scanRight.reverse."
@@ -518,7 +388,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* except the first one.
* @throws `UnsupportedOperationException` if the $coll is empty.
*/
- def tail: Repr = {
+ override def tail: Repr = {
if (isEmpty) throw new UnsupportedOperationException("empty.tail")
drop(1)
}
@@ -561,37 +431,12 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
b.result
}
- /** Selects first ''n'' elements.
- * $orderDependent
- * @param n the number of elements to take from this $coll.
- * @return a $coll consisting only of the first `n` elements of this $coll,
- * or else the whole $coll, if it has less than `n` elements.
- */
def take(n: Int): Repr = slice(0, n)
- /** Selects all elements except first ''n'' ones.
- * $orderDependent
- * @param n the number of elements to drop from this $coll.
- * @return a $coll consisting of all elements of this $coll except the first `n` ones, or else the
- * empty $coll, if this $coll has less than `n` elements.
- */
def drop(n: Int): Repr =
if (n <= 0) newBuilder ++= thisCollection result
else sliceWithKnownDelta(n, Int.MaxValue, -n)
- /** Selects an interval of elements. The returned collection is made up
- * of all elements `x` which satisfy the invariant:
- * {{{
- * from <= indexOf(x) < until
- * }}}
- * $orderDependent
- *
- * @param from the lowest index to include from this $coll.
- * @param until the highest index to EXCLUDE from this $coll.
- * @return a $coll containing the elements greater than or equal to
- * index `from` extending up to (but not including) index `until`
- * of this $coll.
- */
def slice(from: Int, until: Int): Repr = sliceWithKnownBound(from max 0, until)
// Precondition: from >= 0, until > 0, builder already configured for building.
@@ -625,12 +470,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
}
}
- /** Takes longest prefix of elements that satisfy a predicate.
- * $orderDependent
- * @param p The predicate used to test elements.
- * @return the longest prefix of this $coll whose elements all satisfy
- * the predicate `p`.
- */
def takeWhile(p: A => Boolean): Repr = {
val b = newBuilder
breakable {
@@ -642,12 +481,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
b.result
}
- /** Drops longest prefix of elements that satisfy a predicate.
- * $orderDependent
- * @param p The predicate used to test elements.
- * @return the longest suffix of this $coll whose first element
- * does not satisfy the predicate `p`.
- */
def dropWhile(p: A => Boolean): Repr = {
val b = newBuilder
var go = false
@@ -658,17 +491,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
b.result
}
- /** Splits this $coll into a prefix/suffix pair according to a predicate.
- *
- * Note: `c span p` is equivalent to (but possibly more efficient than)
- * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the
- * predicate `p` does not cause any side-effects.
- * $orderDependent
- *
- * @param p the test predicate
- * @return a pair consisting of the longest prefix of this $coll whose
- * elements all satisfy `p`, and the rest of this $coll.
- */
def span(p: A => Boolean): (Repr, Repr) = {
val l, r = newBuilder
var toLeft = true
@@ -679,15 +501,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
(l.result, r.result)
}
- /** Splits this $coll into two at a given position.
- * Note: `c splitAt n` is equivalent to (but possibly more efficient than)
- * `(c take n, c drop n)`.
- * $orderDependent
- *
- * @param n the position at which to split.
- * @return a pair of ${coll}s consisting of the first `n`
- * elements of this $coll, and the other elements.
- */
def splitAt(n: Int): (Repr, Repr) = {
val l, r = newBuilder
l.sizeHintBounded(n, this)
@@ -860,10 +673,10 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* @return a new $coll resulting from applying the given collection-valued function
* `f` to each element of the outer $coll that satisfies predicate `p` and concatenating the results.
*/
- def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- self)
- if (p(x)) b ++= f(x)
+ if (p(x)) b ++= f(x).seq
b.result
}
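In TraversableLike, ++ and flatMap now accept any GenTraversableOnce and sequentialize the argument with .seq before appending, and a new scan method simply delegates to scanLeft. A minimal sketch of the behaviour, not part of the patch, assuming a 2.9-style standard library:

object TraversableLikeSketch extends App {
  val xs = List(1, 2, 3)
  println(xs ++ Vector(4, 5).par)               // List(1, 2, 3, 4, 5): a parallel argument is accepted
  println(xs flatMap (x => Vector(x, x).par))   // List(1, 1, 2, 2, 3, 3): f may return a GenTraversableOnce
  println(xs.scan(0)(_ + _))                    // List(0, 1, 3, 6): same as scanLeft on a sequential collection
}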
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index f660d31ab2..90f1583c58 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -17,6 +17,15 @@ import annotation.unchecked.{ uncheckedVariance => uV }
*
* @tparam A the element type of the collection
*
+ * @author Martin Odersky
+ * @author Paul Phillips
+ * @version 2.8
+ * @since 2.8
+ *
+ * @define coll traversable or iterator
+ *
+ * @tparam A the element type of the collection
+ *
* @define traversableonceinfo
* This trait exists primarily to eliminate code duplication between
* `Iterator` and `Traversable`, and thus implements some of the common
@@ -31,11 +40,6 @@ import annotation.unchecked.{ uncheckedVariance => uV }
* creating an `Iterator` with one of the methods on the `Iterator` object,
* or declaring a subclass of `Traversable`.
*
- * @author Martin Odersky
- * @author Paul Phillips
- * @version 2.8
- * @since 2.8
- *
* @define coll traversable or iterator
* @define orderDependent
*
@@ -52,7 +56,7 @@ import annotation.unchecked.{ uncheckedVariance => uV }
*
* Note: will not terminate for infinite-sized collections.
*/
-trait TraversableOnce[+A] {
+trait TraversableOnce[+A] extends GenTraversableOnce[A] {
self =>
/** Self-documenting abstract methods. */
@@ -60,38 +64,14 @@ trait TraversableOnce[+A] {
def isEmpty: Boolean
def hasDefiniteSize: Boolean
- /** Tests whether this $coll can be repeatedly traversed. Always
- * true for Traversables and false for Iterators unless overridden.
- *
- * @return `true` if it is repeatedly traversable, `false` otherwise.
- */
- def isTraversableAgain: Boolean
-
- /** Returns an Iterator over the elements in this $coll. Will return
- * the same Iterator if this instance is already an Iterator.
- * $willNotTerminateInf
- * @return an Iterator containing all elements of this $coll.
- */
- def toIterator: Iterator[A]
-
- /** Converts this $coll to an unspecified Traversable. Will return
- * the same collection if this instance is already Traversable.
- * $willNotTerminateInf
- * @return a Traversable containing all elements of this $coll.
- */
- def toTraversable: Traversable[A]
-
- /** Converts this $coll to a stream.
- * $willNotTerminateInf
- * @return a stream containing all elements of this $coll.
- */
- def toStream: Stream[A]
-
// Note: We could redefine this in TraversableLike to always return `repr`
// of type `Repr`, only if `Repr` had type bounds, which it doesn't, because
// not all `Repr` are a subtype `TraversableOnce[A]`.
// The alternative is redefining it for maps, sets and seqs. For concrete implementations
// we don't have to do this anyway, since they are leaves in the inheritance hierarchy.
+ // Note 2: This is implemented in all collections _not_ inheriting `Traversable[A]`
+ // at least indirectly. Currently, these are `ArrayOps` and `StringOps`.
+ // It is also implemented in `TraversableOnce[A]`.
/** A version of this collection with all
* of the operations implemented sequentially (i.e. in a single-threaded manner).
*
@@ -101,7 +81,7 @@ trait TraversableOnce[+A] {
*
* @return a sequential view of the collection.
*/
- def seq: TraversableOnce[A] = this
+ def seq: TraversableOnce[A]
/** Presently these are abstract because the Traversable versions use
* breakable/break, and I wasn't sure enough of how that's supposed to
@@ -119,29 +99,14 @@ trait TraversableOnce[+A] {
elems
}
- /** The size of this $coll.
- *
- * $willNotTerminateInf
- *
- * @return the number of elements in this $coll.
- */
def size: Int = {
var result = 0
for (x <- self) result += 1
result
}
- /** Tests whether the $coll is not empty.
- *
- * @return `true` if the $coll contains at least one element, `false` otherwise.
- */
def nonEmpty: Boolean = !isEmpty
- /** Counts the number of elements in the $coll which satisfy a predicate.
- *
- * @param p the predicate used to test elements.
- * @return the number of elements satisfying the predicate `p`.
- */
def count(p: A => Boolean): Int = {
var cnt = 0
for (x <- this)
@@ -169,100 +134,19 @@ trait TraversableOnce[+A] {
None
}
- /** Applies a binary operator to a start value and all elements of this $coll,
- * going left to right.
- *
- * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as
- * `xs foldLeft z`.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going left to right with the start value `z` on the left:
- * {{{
- * op(...op(op(z, x,,1,,), x,,2,,), ..., x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
def /:[B](z: B)(op: (B, A) => B): B = foldLeft(z)(op)
- /** Applies a binary operator to all elements of this $coll and a start value,
- * going right to left.
- *
- * Note: `:\` is alternate syntax for `foldRight`; `xs :\ z` is the same as
- * `xs foldRight z`.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param z the start value
- * @param op the binary operator
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going right to left with the start value `z` on the right:
- * {{{
- * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
def :\[B](z: B)(op: (A, B) => B): B = foldRight(z)(op)
- /** Applies a binary operator to a start value and all elements of this $coll,
- * going left to right.
- *
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going left to right with the start value `z` on the left:
- * {{{
- * op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
def foldLeft[B](z: B)(op: (B, A) => B): B = {
var result = z
this.seq foreach (x => result = op(result, x))
result
}
- /** Applies a binary operator to all elements of this $coll and a start value,
- * going right to left.
- *
- * $willNotTerminateInf
- * $orderDependentFold
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going right to left with the start value `z` on the right:
- * {{{
- * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
def foldRight[B](z: B)(op: (A, B) => B): B =
reversed.foldLeft(z)((x, y) => op(y, x))
- /** Applies a binary operator to all elements of this $coll, going left to right.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going left to right:
- * {{{
- * op(...(op(x,,1,,, x,,2,,), ... ) , x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty.
- */
def reduceLeft[B >: A](op: (B, A) => B): B = {
if (isEmpty)
throw new UnsupportedOperationException("empty.reduceLeft")
@@ -280,20 +164,6 @@ trait TraversableOnce[+A] {
acc
}
- /** Applies a binary operator to all elements of this $coll, going right to left.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going right to left:
- * {{{
- * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty.
- */
def reduceRight[B >: A](op: (A, B) => B): B = {
if (isEmpty)
throw new UnsupportedOperationException("empty.reduceRight")
@@ -301,73 +171,24 @@ trait TraversableOnce[+A] {
reversed.reduceLeft[B]((x, y) => op(y, x))
}
- /** Optionally applies a binary operator to all elements of this $coll, going left to right.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceLeft(op)` if this $coll is nonempty,
- * `None` otherwise.
- */
def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] =
if (isEmpty) None else Some(reduceLeft(op))
- /** Optionally applies a binary operator to all elements of this $coll, going
- * right to left.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceRight(op)` if this $coll is nonempty,
- * `None` otherwise.
- */
def reduceRightOption[B >: A](op: (A, B) => B): Option[B] =
if (isEmpty) None else Some(reduceRight(op))
- /** Sums up the elements of this collection.
- *
- * @param num an implicit parameter defining a set of numeric operations
- * which includes the `+` operator to be used in forming the sum.
- * @tparam B the result type of the `+` operator.
- * @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
- *
- * @usecase def sum: A
- *
- * @return the sum of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `sum`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
- *
- */
+ def reduce[A1 >: A](op: (A1, A1) => A1): A1 = reduceLeft(op)
+
+ def reduceOption[A1 >: A](op: (A1, A1) => A1): Option[A1] = reduceLeftOption(op)
+
+ def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op)
+
+ def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
+
def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus)
- /** Multiplies up the elements of this collection.
- /** Multiplies together the elements of this collection.
- * @param num an implicit parameter defining a set of numeric operations
- * which includes the `*` operator to be used in forming the product.
- * @tparam B the result type of the `*` operator.
- * @return the product of all elements of this $coll with respect to the `*` operator in `num`.
- *
- * @usecase def product: A
- *
- * @return the product of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `product`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
- */
def product[B >: A](implicit num: Numeric[B]): B = foldLeft(num.one)(num.times)
- /** Finds the smallest element.
- *
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
- * @return the smallest element of this $coll with respect to the ordering `cmp`.
- *
- * @usecase def min: A
- * @return the smallest element of this $coll
- */
def min[B >: A](implicit cmp: Ordering[B]): A = {
if (isEmpty)
throw new UnsupportedOperationException("empty.min")
@@ -375,15 +196,6 @@ trait TraversableOnce[+A] {
reduceLeft((x, y) => if (cmp.lteq(x, y)) x else y)
}
- /** Finds the largest element.
- *
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
- * @return the largest element of this $coll with respect to the ordering `cmp`.
- *
- * @usecase def max: A
- * @return the largest element of this $coll.
- */
def max[B >: A](implicit cmp: Ordering[B]): A = {
if (isEmpty)
throw new UnsupportedOperationException("empty.max")
@@ -408,50 +220,14 @@ trait TraversableOnce[+A] {
* $willNotTerminateInf
* @param dest The buffer to which elements are copied.
*/
- def copyToBuffer[B >: A](dest: Buffer[B]): Unit = dest ++= self
+ def copyToBuffer[B >: A](dest: Buffer[B]): Unit = dest ++= seq
- /** Copies values of this $coll to an array.
- * Fills the given array `xs` with values of this $coll, after skipping `start` values.
- * Copying will stop once either the end of the current $coll is reached,
- * or the end of the array is reached.
- *
- * $willNotTerminateInf
- *
- * @param xs the array to fill.
- * @param start the starting index.
- * @tparam B the type of the elements of the array.
- *
- * @usecase def copyToArray(xs: Array[A], start: Int): Unit
- */
def copyToArray[B >: A](xs: Array[B], start: Int): Unit =
copyToArray(xs, start, xs.length - start)
- /** Copies values of this $coll to an array.
- * Fills the given array `xs` with values of this $coll.
- * Copying will stop once either the end of the current $coll is reached,
- * or the end of the array is reached.
- *
- * $willNotTerminateInf
- *
- * @param xs the array to fill.
- * @tparam B the type of the elements of the array.
- *
- * @usecase def copyToArray(xs: Array[A]): Unit
- */
def copyToArray[B >: A](xs: Array[B]): Unit =
copyToArray(xs, 0, xs.length)
- /** Converts this $coll to an array.
- * $willNotTerminateInf
- *
- * @tparam B the type of the elements of the array. A `ClassManifest` for
- * this type must be available.
- * @return an array containing all elements of this $coll.
- *
- * @usecase def toArray: Array[A]
- * @return an array containing all elements of this $coll.
- * A `ClassManifest` must be available for the element type of this $coll.
- */
def toArray[B >: A : ClassManifest]: Array[B] = {
if (isTraversableAgain) {
val result = new Array[B](size)
@@ -461,60 +237,20 @@ trait TraversableOnce[+A] {
else toBuffer.toArray
}
- /** Converts this $coll to a list.
- * $willNotTerminateInf
- * @return a list containing all elements of this $coll.
- */
- def toList: List[A] = new ListBuffer[A] ++= self toList
+ def toTraversable: Traversable[A]
+
+ def toList: List[A] = new ListBuffer[A] ++= seq toList
- /** Converts this $coll to an iterable collection. Note that
- * the choice of target `Iterable` is lazy in this default implementation
- * as this `TraversableOnce` may be lazy and unevaluated (i.e. it may
- * be an iterator which is only traversable once).
- *
- * $willNotTerminateInf
- * @return an `Iterable` containing all elements of this $coll.
- */
def toIterable: Iterable[A] = toStream
- /** Converts this $coll to a sequence. As with `toIterable`, it's lazy
- * in this default implementation, as this `TraversableOnce` may be
- * lazy and unevaluated.
- *
- * $willNotTerminateInf
- * @return a sequence containing all elements of this $coll.
- */
def toSeq: Seq[A] = toStream
- /** Converts this $coll to an indexed sequence.
- * $willNotTerminateInf
- * @return an indexed sequence containing all elements of this $coll.
- */
- def toIndexedSeq[B >: A]: immutable.IndexedSeq[B] = immutable.IndexedSeq() ++ self
+ def toIndexedSeq[B >: A]: immutable.IndexedSeq[B] = immutable.IndexedSeq() ++ seq
- /** Converts this $coll to a mutable buffer.
- * $willNotTerminateInf
- * @return a buffer containing all elements of this $coll.
- */
- def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= self
+ def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= seq
- /** Converts this $coll to a set.
- * $willNotTerminateInf
- * @return a set containing all elements of this $coll.
- */
- def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ self
+ def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ seq
- /** Converts this $coll to a map. This method is unavailable unless
- * the elements are members of Tuple2, each ((T, U)) becoming a key-value
- * pair in the map. Duplicate keys will be overwritten by later keys:
- * if this is an unordered collection, which key is in the resulting map
- * is undefined.
- * $willNotTerminateInf
- * @return a map containing all elements of this $coll.
- * @usecase def toMap[T, U]: Map[T, U]
- * @return a map of type `immutable.Map[T, U]`
- * containing all key/value pairs of type `(T, U)` of this $coll.
- */
def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
val b = immutable.Map.newBuilder[T, U]
for (x <- self)
@@ -523,41 +259,11 @@ trait TraversableOnce[+A] {
b.result
}
- /** Displays all elements of this $coll in a string using start, end, and
- * separator strings.
- *
- * @param start the starting string.
- * @param sep the separator string.
- * @param end the ending string.
- * @return a string representation of this $coll. The resulting string
- * begins with the string `start` and ends with the string
- * `end`. Inside, the string representations (w.r.t. the method
- * `toString`) of all elements of this $coll are separated by
- * the string `sep`.
- *
- * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"`
- */
def mkString(start: String, sep: String, end: String): String =
addString(new StringBuilder(), start, sep, end).toString
- /** Displays all elements of this $coll in a string using a separator string.
- *
- * @param sep the separator string.
- * @return a string representation of this $coll. In the resulting string
- * the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll are separated by the string `sep`.
- *
- * @example `List(1, 2, 3).mkString("|") = "1|2|3"`
- */
def mkString(sep: String): String = mkString("", sep, "")
- /** Displays all elements of this $coll in a string.
- *
- * @return a string representation of this $coll. In the resulting string
- * the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll follow each other without any
- * separator string.
- */
def mkString: String = mkString("")
/** Appends all elements of this $coll to a string builder using start, end,
@@ -612,6 +318,8 @@ trait TraversableOnce[+A] {
def addString(b: StringBuilder): StringBuilder = addString(b, "")
}
+
+
object TraversableOnce {
implicit def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
implicit def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
@@ -643,9 +351,8 @@ object TraversableOnce {
class MonadOps[+A](trav: TraversableOnce[A]) {
def map[B](f: A => B): TraversableOnce[B] = trav.toIterator map f
- def flatMap[B](f: A => TraversableOnce[B]): TraversableOnce[B] = trav.toIterator flatMap f
+ def flatMap[B](f: A => GenTraversableOnce[B]): TraversableOnce[B] = trav.toIterator flatMap f
def withFilter(p: A => Boolean) = trav.toIterator filter p
def filter(p: A => Boolean): TraversableOnce[A] = withFilter(p)
}
}
-
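TraversableOnce now extends GenTraversableOnce, leaves seq abstract (the note in the diff names ArrayOps and StringOps as the implementors outside the Traversable hierarchy), and gains the order-insensitive operations reduce, reduceOption, fold and aggregate, each delegating to its left-to-right counterpart. A minimal sketch, not part of the patch, assuming a 2.9-style standard library:

object TraversableOnceSketch extends App {
  val xs = List(1, 2, 3, 4)
  println(xs.reduce(_ + _))                 // 10, same as reduceLeft here
  println(xs.reduceOption(_ max _))         // Some(4)
  println(xs.fold(100)(_ + _))              // 110, same as foldLeft here
  println(xs.aggregate(0)(_ + _, _ + _))    // 10; the combine operator only matters for parallel collections
}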
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index caf7af0946..15565e57c6 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -30,9 +30,9 @@ trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversabl
override def nonEmpty: Boolean = self.nonEmpty
override def size: Int = self.size
override def hasDefiniteSize = self.hasDefiniteSize
- override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.++(xs)(bf)
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.++(xs)(bf)
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.map(f)(bf)
- override def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.flatMap(f)(bf)
+ override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.flatMap(f)(bf)
override def filter(p: A => Boolean): Repr = self.filter(p)
override def filterNot(p: A => Boolean): Repr = self.filterNot(p)
override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.collect(pf)(bf)
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index 87690f9548..3fad7d462d 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -15,7 +15,7 @@ import TraversableView.NoBuilder
/** A base trait for non-strict views of traversable collections.
* $traversableViewInfo
*/
-trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] { }
+trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] with GenTraversableView[A, Coll] { }
/** An object containing the necessary implicit definitions to make
* `TraversableView`s work. Its definitions are generally not accessed directly by clients.
@@ -24,7 +24,8 @@ object TraversableView {
class NoBuilder[A] extends Builder[A, Nothing] {
def +=(elem: A): this.type = this
def iterator: Iterator[A] = Iterator.empty
- @deprecated("use `iterator' instead") def elements = iterator
+ @deprecated("use `iterator' instead", "2.8.0")
+ def elements = iterator
def result() = throw new UnsupportedOperationException("TraversableView.Builder.result")
def clear() {}
}
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index b5b18b7a8a..6dc0936c61 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -59,7 +59,7 @@ trait ViewMkString[+A] {
trait TraversableViewLike[+A,
+Coll,
+This <: TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]]
- extends Traversable[A] with TraversableLike[A, This] with ViewMkString[A]
+ extends Traversable[A] with TraversableLike[A, This] with ViewMkString[A] with GenTraversableViewLike[A, Coll, This]
{
self =>
@@ -70,7 +70,6 @@ trait TraversableViewLike[+A,
protected[this] def viewIdentifier: String = ""
protected[this] def viewIdString: String = ""
override def stringPrefix = "TraversableView"
- def viewToString = stringPrefix + viewIdString + "(...)"
def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]) = {
val b = bf(underlying)
@@ -78,129 +77,36 @@ trait TraversableViewLike[+A,
b.result()
}
- /** The implementation base trait of this view.
- * This trait and all its subtraits has to be re-implemented for each
- * ViewLike class.
- */
- trait Transformed[+B] extends TraversableView[B, Coll] {
+ trait Transformed[+B] extends TraversableView[B, Coll] with super.Transformed[B] {
def foreach[U](f: B => U): Unit
- lazy val underlying = self.underlying
- final override protected[this] def viewIdString = self.viewIdString + viewIdentifier
override def stringPrefix = self.stringPrefix
override def toString = viewToString
}
- trait EmptyView extends Transformed[Nothing] {
- final override def isEmpty = true
- final override def foreach[U](f: Nothing => U): Unit = ()
- }
+
+ trait EmptyView extends Transformed[Nothing] with super.EmptyView
/** A fall back which forces everything into a vector and then applies an operation
* on it. Used for those operations which do not naturally lend themselves to a view
*/
- trait Forced[B] extends Transformed[B] {
- protected[this] val forced: Seq[B]
- def foreach[U](f: B => U) = forced foreach f
- final override protected[this] def viewIdentifier = "C"
- }
+ trait Forced[B] extends Transformed[B] with super.Forced[B]
- trait Sliced extends Transformed[A] {
- protected[this] val endpoints: SliceInterval
- protected[this] def from = endpoints.from
- protected[this] def until = endpoints.until
- // protected def newSliced(_endpoints: SliceInterval): Transformed[A] =
- // self.newSliced(endpoints.recalculate(_endpoints))
-
- def foreach[U](f: A => U) {
- var index = 0
- for (x <- self) {
- if (from <= index) {
- if (until <= index) return
- f(x)
- }
- index += 1
- }
- }
- final override protected[this] def viewIdentifier = "S"
- }
+ trait Sliced extends Transformed[A] with super.Sliced
- trait Mapped[B] extends Transformed[B] {
- protected[this] val mapping: A => B
- def foreach[U](f: B => U) {
- for (x <- self)
- f(mapping(x))
- }
- final override protected[this] def viewIdentifier = "M"
- }
+ trait Mapped[B] extends Transformed[B] with super.Mapped[B]
- trait FlatMapped[B] extends Transformed[B] {
- protected[this] val mapping: A => TraversableOnce[B]
- def foreach[U](f: B => U) {
- for (x <- self)
- for (y <- mapping(x))
- f(y)
- }
- final override protected[this] def viewIdentifier = "N"
- }
+ trait FlatMapped[B] extends Transformed[B] with super.FlatMapped[B]
- trait Appended[B >: A] extends Transformed[B] {
- protected[this] val rest: Traversable[B]
- def foreach[U](f: B => U) {
- self foreach f
- rest foreach f
- }
- final override protected[this] def viewIdentifier = "A"
- }
+ trait Appended[B >: A] extends Transformed[B] with super.Appended[B]
- trait Filtered extends Transformed[A] {
- protected[this] val pred: A => Boolean
- def foreach[U](f: A => U) {
- for (x <- self)
- if (pred(x)) f(x)
- }
- final override protected[this] def viewIdentifier = "F"
- }
+ trait Filtered extends Transformed[A] with super.Filtered
- trait TakenWhile extends Transformed[A] {
- protected[this] val pred: A => Boolean
- def foreach[U](f: A => U) {
- for (x <- self) {
- if (!pred(x)) return
- f(x)
- }
- }
- final override protected[this] def viewIdentifier = "T"
- }
+ trait TakenWhile extends Transformed[A] with super.TakenWhile
- trait DroppedWhile extends Transformed[A] {
- protected[this] val pred: A => Boolean
- def foreach[U](f: A => U) {
- var go = false
- for (x <- self) {
- if (!go && !pred(x)) go = true
- if (go) f(x)
- }
- }
- final override protected[this] def viewIdentifier = "D"
- }
+ trait DroppedWhile extends Transformed[A] with super.DroppedWhile
- /** Boilerplate method, to override in each subclass
- * This method could be eliminated if Scala had virtual classes
- */
- protected def newForced[B](xs: => Seq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
- protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected def newFlatMapped[B](f: A => TraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
- protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
- protected def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
- protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
- protected def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
-
- protected def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
- protected def newDropped(n: Int): Transformed[A] = newSliced(SliceInterval(n, Int.MaxValue))
-
- override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
- newAppended(xs.toTraversable).asInstanceOf[That]
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ newAppended(xs.seq.toTraversable).asInstanceOf[That]
// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
// else super.++[B, That](that)(bf)
}
@@ -215,7 +121,7 @@ trait TraversableViewLike[+A,
override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That =
filter(pf.isDefinedAt).map(pf)(bf)
- override def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
newFlatMapped(f).asInstanceOf[That]
// was: val b = bf(repr)
// if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
@@ -223,6 +129,21 @@ trait TraversableViewLike[+A,
}
private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
+ /** Boilerplate method, to override in each subclass
+ * This method could be eliminated if Scala had virtual classes
+ */
+ protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
+ protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
+ protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
+ protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
+ protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
+ protected def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
+ protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
+ protected def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
+
+ protected def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
+ protected def newDropped(n: Int): Transformed[A] = newSliced(SliceInterval(n, Int.MaxValue))
+
override def filter(p: A => Boolean): This = newFiltered(p)
override def withFilter(p: A => Boolean): This = newFiltered(p)
override def partition(p: A => Boolean): (This, This) = (newFiltered(p), newFiltered(!p(_)))
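The view transformer traits above (Forced, Sliced, Mapped, FlatMapped, Appended, Filtered, TakenWhile, DroppedWhile) become thin aliases for the corresponding traits inherited from GenTraversableViewLike, and ++ and flatMap are widened as elsewhere; the laziness contract itself is unchanged. A minimal sketch of that contract, not part of the patch, assuming a 2.9-style standard library:

object ViewSketch extends App {
  val v = List(1, 2, 3, 4).view map { x => println("mapping " + x); x * 2 } filter (_ > 4)
  println(v)         // prints a lazy placeholder such as SeqViewMF(...); nothing has been mapped yet
  println(v.force)   // traverses once, printing the "mapping ..." lines and yielding List(6, 8)
}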
diff --git a/src/library/scala/collection/generic/Addable.scala b/src/library/scala/collection/generic/Addable.scala
index 5be428305e..8f33a62f10 100644
--- a/src/library/scala/collection/generic/Addable.scala
+++ b/src/library/scala/collection/generic/Addable.scala
@@ -21,7 +21,7 @@ package generic
* @define coll collection
* @define Coll Addable
*/
-@deprecated("Will be removed after scala 2.9")
+@deprecated("Will be removed after scala 2.9", "2.8.0")
trait Addable[A, +Repr <: Addable[A, Repr]] { self =>
/** The representation object of type `Repr` which contains the collection's elements
@@ -52,5 +52,5 @@ trait Addable[A, +Repr <: Addable[A, Repr]] { self =>
* @param elems the collection containing the added elements.
* @return a new $coll with the given elements added.
*/
- def ++ (xs: TraversableOnce[A]): Repr = (repr /: xs) (_ + _)
+ def ++ (xs: GenTraversableOnce[A]): Repr = (repr /: xs.seq) (_ + _)
}
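Addable's ++ now takes a GenTraversableOnce and, as its one-line body shows, is just a left fold of + over the sequentialized argument. The same equivalence can be observed on any collection with an element-adding +; a minimal sketch, not part of the patch, assuming a 2.9-style standard library:

object AddableSketch extends App {
  val s  = Set(1, 2)
  val xs = List(2, 3, 4)
  println(s ++ xs)            // Set(1, 2, 3, 4)
  println((s /: xs)(_ + _))   // Set(1, 2, 3, 4): ++ behaves like folding + over the argument
}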
diff --git a/src/library/scala/collection/generic/CanCombineFrom.scala b/src/library/scala/collection/generic/CanCombineFrom.scala
index b56dab9794..ad2381a571 100644
--- a/src/library/scala/collection/generic/CanCombineFrom.scala
+++ b/src/library/scala/collection/generic/CanCombineFrom.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection
package generic
@@ -21,3 +29,4 @@ trait CanCombineFrom[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] wit
+
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
index 7e468a9c96..4d6d9ec6a3 100755
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -1,11 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
+
/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
* of trait `TraversableLike`.
*/
trait FilterMonadic[+A, +Repr] {
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+ def flatMap[B, That](f: A => collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
def foreach[U](f: A => U): Unit
def withFilter(p: A => Boolean): FilterMonadic[A, Repr]
}
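FilterMonadic's four members (map, flatMap, foreach, withFilter), with flatMap now taking a GenTraversableOnce-returning function, are exactly the shape that for-comprehensions desugar to, which is why the result of withFilter can sit directly in a for expression. A minimal sketch, not part of the patch, assuming a 2.9-style standard library:

object FilterMonadicSketch extends App {
  val xs = List(1, 2, 3, 4)
  val viaFor    = for (x <- xs if x % 2 == 0; y <- List(10, 20)) yield x * y
  val desugared = xs withFilter (_ % 2 == 0) flatMap (x => List(10, 20) map (y => x * y))
  println(viaFor == desugared)   // true: both are List(20, 40, 40, 80)
}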
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index 084f884ebb..353ab9980f 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -21,7 +21,7 @@ import mutable.Builder
* @define coll collection
* @define Coll CC
*/
-abstract class GenericCompanion[+CC[X] <: Traversable[X]] {
+abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
/** The underlying collection type with unknown element type */
type Coll = CC[_]
diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala
index 9e151e7543..783a4878f6 100644
--- a/src/library/scala/collection/generic/GenericParCompanion.scala
+++ b/src/library/scala/collection/generic/GenericParCompanion.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
import scala.collection.parallel.Combiner
@@ -26,3 +34,4 @@ trait GenericParMapCompanion[+CC[P, Q] <: ParMap[P, Q]] {
}
+
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index 0d87a2d548..1e3f7b5e40 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
@@ -45,8 +53,13 @@ extends GenericTraversableTemplate[A, CC]
}
-trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]]
+trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]] extends GenericParTemplate[(K, V), ParIterable]
{
+ protected[this] override def newCombiner: Combiner[(K, V), CC[K, V]] = {
+ val cb = mapCompanion.newCombiner[K, V]
+ cb
+ }
+
def mapCompanion: GenericParMapCompanion[CC]
def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = {
@@ -59,3 +72,4 @@ trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]]
+
diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index 839d61bfc3..9e1a04179b 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -14,7 +14,7 @@ package generic
/**
* @since 2.8
*/
-trait GenericSetTemplate[A, +CC[X] <: Set[X]] extends GenericTraversableTemplate[A, CC] {
+trait GenericSetTemplate[A, +CC[X] <: GenSet[X]] extends GenericTraversableTemplate[A, CC] {
def empty: CC[A] = companion.empty[A]
}
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 46b39bbf8b..37d86a0aa9 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -25,7 +25,7 @@ import annotation.unchecked.uncheckedVariance
* @define coll collection
* @define Coll CC
*/
-trait GenericTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
+trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
/** Applies a function `f` to all elements of this $coll.
*
diff --git a/src/library/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala
index 2c24b437d8..cc69faa270 100644
--- a/src/library/scala/collection/generic/HasNewCombiner.scala
+++ b/src/library/scala/collection/generic/HasNewCombiner.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
@@ -24,3 +32,4 @@ trait HasNewCombiner[+T, +Repr] {
+
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index fa5c87ad9f..8f9c4c9542 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -41,5 +41,5 @@ trait IterableForwarder[+A] extends Iterable[A] with TraversableForwarder[A] {
// Iterable methods could be printed by cat IterableLike.scala | sed -n '/trait Iterable/,$ p' | egrep '^ (override )?def'
override def iterator: Iterator[A] = underlying.iterator
- override def sameElements[B >: A](that: Iterable[B]): Boolean = underlying.sameElements(that)
+ override def sameElements[B >: A](that: GenIterable[B]): Boolean = underlying.sameElements(that)
}
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index 1fdee065f6..122d1c9313 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -6,13 +6,13 @@
** |/ **
\* */
-
-
package scala.collection
package generic
+
import mutable.{Builder, MapBuilder}
+
/** A template for companion objects of `Map` and subclasses thereof.
*
* @define coll map
@@ -31,7 +31,7 @@ import mutable.{Builder, MapBuilder}
* @see CanBuildFrom
* @see GenericCanBuildFrom
*/
-abstract class MapFactory[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]] {
+abstract class MapFactory[CC[A, B] <: GenMap[A, B] with GenMapLike[A, B, CC[A, B]]] {
/** The type constructor of the collection that can be built by this factory */
type Coll = CC[_, _]
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index b4da60a133..e8c2b239c3 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
@@ -16,7 +24,7 @@ abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]
extends TraversableFactory[CC]
with GenericParCompanion[CC] {
- type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
+ //type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
/**
* A generic implementation of the `CanCombineFrom` trait, which forwards all calls to
@@ -40,4 +48,3 @@ extends TraversableFactory[CC]
-
diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala
index dda49e1354..1a9efdf7a7 100644
--- a/src/library/scala/collection/generic/ParMapFactory.scala
+++ b/src/library/scala/collection/generic/ParMapFactory.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
import scala.collection.parallel.ParMap
@@ -35,3 +43,4 @@ extends MapFactory[CC]
}
}
+
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 7c43b29bf4..4cf39f9041 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
@@ -31,3 +39,4 @@ abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC[X], _] wit
+
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index f763e5fba6..5e733d8444 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -15,7 +15,7 @@ package generic
*
* @since 2.8
*/
-abstract class SeqFactory[CC[X] <: Seq[X] with GenericTraversableTemplate[X, CC]] extends TraversableFactory[CC] {
+abstract class SeqFactory[CC[X] <: GenSeq[X] with GenericTraversableTemplate[X, CC]] extends TraversableFactory[CC] {
/** This method is called in a pattern match { case Seq(...) => }.
*
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index 87760b1b82..2624e63a90 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -45,15 +45,15 @@ trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
override def lastIndexWhere(p: A => Boolean): Int = underlying.lastIndexWhere(p)
override def lastIndexWhere(p: A => Boolean, end: Int): Int = underlying.lastIndexWhere(p, end)
override def reverseIterator: Iterator[A] = underlying.reverseIterator
- override def startsWith[B](that: Seq[B], offset: Int): Boolean = underlying.startsWith(that, offset)
- override def startsWith[B](that: Seq[B]): Boolean = underlying.startsWith(that)
- override def endsWith[B](that: Seq[B]): Boolean = underlying.endsWith(that)
- override def indexOfSlice[B >: A](that: Seq[B]): Int = underlying.indexOfSlice(that)
- override def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = underlying.indexOfSlice(that, from)
- override def lastIndexOfSlice[B >: A](that: Seq[B]): Int = underlying.lastIndexOfSlice(that)
- override def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
- override def containsSlice[B](that: Seq[B]): Boolean = underlying.containsSlice(that)
+ override def startsWith[B](that: GenSeq[B], offset: Int): Boolean = underlying.startsWith(that, offset)
+ override def startsWith[B](that: GenSeq[B]): Boolean = underlying.startsWith(that)
+ override def endsWith[B](that: GenSeq[B]): Boolean = underlying.endsWith(that)
+ override def indexOfSlice[B >: A](that: GenSeq[B]): Int = underlying.indexOfSlice(that)
+ override def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = underlying.indexOfSlice(that, from)
+ override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = underlying.lastIndexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
+ override def containsSlice[B](that: GenSeq[B]): Boolean = underlying.containsSlice(that)
override def contains(elem: Any): Boolean = underlying.contains(elem)
- override def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
+ override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
override def indices: Range = underlying.indices
}
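
SeqForwarder (like IterableForwarder above it) only delegates to `underlying`, so this hunk is purely a signature change to the Gen* variants; the one-line bodies stay the same. A minimal, hypothetical sketch of the forwarder pattern:

// Every operation is forwarded to `underlying`; widening a parameter type (as the
// patch does with Seq -> GenSeq) leaves these delegating bodies untouched.
trait MySeqForwarder[A] {
  protected def underlying: Seq[A]
  def length: Int = underlying.length
  def contains(elem: A): Boolean = underlying.contains(elem)
  def startsWith(that: Seq[A]): Boolean = underlying.startsWith(that)
  def indices: Range = underlying.indices
}

class Wrapped[A](xs: Seq[A]) extends MySeqForwarder[A] {
  protected def underlying: Seq[A] = xs
}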
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index 8f42589500..3b8b5d3d47 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -30,7 +30,7 @@ import mutable.Builder
* @see CanBuildFrom
* @see GenericCanBuildFrom
*/
-abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
+abstract class SetFactory[CC[X] <: GenSet[X] with GenSetLike[X, CC[X]]]
extends GenericCompanion[CC] {
def newBuilder[A]: Builder[A, CC[A]]
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index e29e4fcdc9..cab38027f6 100644
--- a/src/library/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala
index 1b3425d792..1e151f9212 100644
--- a/src/library/scala/collection/generic/Subtractable.scala
+++ b/src/library/scala/collection/generic/Subtractable.scala
@@ -55,5 +55,5 @@ trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self =>
* @return a new $coll that contains all elements of the current $coll
* except one less occurrence of each of the elements of `elems`.
*/
- def --(xs: TraversableOnce[A]): Repr = (repr /: xs) (_ - _)
+ def --(xs: GenTraversableOnce[A]): Repr = (repr /: xs.seq) (_ - _)
}
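
The new signature of `--` above folds subtraction over the argument's sequential view (`xs.seq`), so a parallel collection can be passed as well. A minimal sketch of the same left fold with a hypothetical Bag type; `foldLeft` stands in for the `/:` operator spelled in the patch:

final case class Bag[A](elems: List[A]) {
  // Remove one occurrence of x, if present.
  def -(x: A): Bag[A] = {
    val (before, rest) = elems.span(_ != x)
    Bag(before ++ rest.drop(1))
  }
  // Subtract every element of xs by folding `-` from the left, as `--` does above.
  def --(xs: Iterable[A]): Bag[A] = xs.foldLeft(this)(_ - _)
}
// Bag(List(1, 2, 2, 3)) -- List(2, 3)  ==  Bag(List(1, 2))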
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index de42737e44..66e5d2db77 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -33,7 +33,7 @@ package generic
* @see CanBuildFrom
* @see GenericCanBuildFrom
*/
-abstract class TraversableFactory[CC[X] <: Traversable[X] with GenericTraversableTemplate[X, CC]]
+abstract class TraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]]
extends GenericCompanion[CC] {
/** A generic implementation of the `CanBuildFrom` trait, which forwards
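
The recurring change in the Generic*Template and *Factory hunks above is widening the upper bounds from Traversable/Seq/Set/Map to their Gen* counterparts, so one template or factory can be shared by the sequential and the parallel collections. A minimal, self-contained sketch of the idea, using hypothetical names (MyGen, MySeqColl, MyParColl, MyFactory, MyVector) rather than the real scala.collection types:

// Hypothetical stand-ins for the Gen*/sequential/parallel split; not the real API.
trait MyGen[+A] { def seq: MySeqColl[A] }                   // common supertype
trait MySeqColl[+A] extends MyGen[A] { def seq: MySeqColl[A] = this }
trait MyParColl[+A] extends MyGen[A]                        // leaves `seq` abstract

// Bounded by the Gen* supertype, a single factory serves both branches.
abstract class MyFactory[CC[X] <: MyGen[X]] {
  def empty[A]: CC[A]
}

final case class MyVector[+A](xs: List[A]) extends MySeqColl[A]
object MyVector extends MyFactory[MyVector] {
  def empty[A]: MyVector[A] = MyVector(Nil)
}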
diff --git a/src/library/scala/collection/immutable/GenIterable.scala.disabled b/src/library/scala/collection/immutable/GenIterable.scala.disabled
new file mode 100644
index 0000000000..252c721271
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenIterable.scala.disabled
@@ -0,0 +1,37 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package immutable
+
+
+import generic._
+import mutable.Builder
+
+
+/** A base trait for iterable collections that can be mutated.
+ *
+ * $possiblyparinfo
+ *
+ * $iterableInfo
+ */
+trait GenIterable[+A] extends GenTraversable[A]
+ with scala.collection.GenIterable[A]
+ with scala.collection.GenIterableLike[A, GenIterable[A]]
+// with GenericTraversableTemplate[A, GenIterable]
+{
+ def seq: Iterable[A]
+ //override def companion: GenericCompanion[GenIterable] = GenIterable
+}
+
+
+// object GenIterable extends TraversableFactory[GenIterable] {
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = new GenericCanBuildFrom[A]
+// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
+// }
+
diff --git a/src/library/scala/collection/immutable/GenMap.scala.disabled b/src/library/scala/collection/immutable/GenMap.scala.disabled
new file mode 100644
index 0000000000..eb7ef2951c
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenMap.scala.disabled
@@ -0,0 +1,36 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package immutable
+
+import generic._
+
+
+/** A base trait for maps that can be mutated.
+ * $possiblyparinfo
+ * $mapNote
+ * $mapTags
+ * @since 1.0
+ * @author Matthias Zenger
+ */
+trait GenMap[A, +B]
+extends GenIterable[(A, B)]
+ with scala.collection.GenMap[A, B]
+ with scala.collection.GenMapLike[A, B, GenMap[A, B]]
+{
+ def seq: Map[A, B]
+}
+
+
+// object GenMap extends MapFactory[GenMap] {
+// def empty[A, B]: Map[A, B] = Map.empty
+
+// /** $mapCanBuildFromInfo */
+// implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
+// }
diff --git a/src/library/scala/collection/immutable/GenSeq.scala.disabled b/src/library/scala/collection/immutable/GenSeq.scala.disabled
new file mode 100644
index 0000000000..36aff8f8e6
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenSeq.scala.disabled
@@ -0,0 +1,49 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package immutable
+
+
+import generic._
+import mutable.Builder
+
+
+/** A subtrait of `collection.GenSeq` which represents sequences
+ * that can be mutated.
+ *
+ * $possiblyparinfo
+ *
+ * $seqInfo
+ *
+ * The class adds an `update` method to `collection.Seq`.
+ *
+ * @define Coll mutable.Seq
+ * @define coll mutable sequence
+ */
+trait GenSeq[+A] extends GenIterable[A]
+ with scala.collection.GenSeq[A]
+ with scala.collection.GenSeqLike[A, GenSeq[A]]
+// with GenericTraversableTemplate[A, GenSeq]
+{
+ def seq: Seq[A]
+ //override def companion: GenericCompanion[GenSeq] = GenSeq
+}
+
+
+// object GenSeq extends SeqFactory[GenSeq] {
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = new GenericCanBuildFrom[A]
+// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
+// }
+
+
+
+
+
diff --git a/src/library/scala/collection/immutable/GenSet.scala.disabled b/src/library/scala/collection/immutable/GenSet.scala.disabled
new file mode 100644
index 0000000000..3cca6ba66e
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenSet.scala.disabled
@@ -0,0 +1,43 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package immutable
+
+
+import generic._
+import mutable.Builder
+
+
+/** A generic trait for mutable sets.
+ *
+ * $possiblyparinfo
+ * $setNote
+ * $setTags
+ *
+ * @since 1.0
+ * @author Matthias Zenger
+ * @define Coll mutable.Set
+ * @define coll mutable set
+ */
+trait GenSet[A] extends GenIterable[A]
+ with scala.collection.GenSet[A]
+ with scala.collection.GenSetLike[A, GenSet[A]]
+// with GenericSetTemplate[A, GenSet]
+{
+ //override def companion: GenericCompanion[GenSet] = GenSet
+ def seq: Set[A]
+}
+
+
+// object GenSet extends TraversableFactory[GenSet] {
+// implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+// def newBuilder[A] = Set.newBuilder
+// }
diff --git a/src/library/scala/collection/immutable/GenTraversable.scala.disabled b/src/library/scala/collection/immutable/GenTraversable.scala.disabled
new file mode 100644
index 0000000000..2ee9bd9d8c
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenTraversable.scala.disabled
@@ -0,0 +1,41 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package immutable
+
+
+import generic._
+import mutable.Builder
+
+
+/** A trait for traversable collections that can be mutated.
+ *
+ * $possiblyparinfo
+ *
+ * $traversableInfo
+ * @define mutability mutable
+ */
+trait GenTraversable[+A] extends scala.collection.GenTraversable[A]
+ with scala.collection.GenTraversableLike[A, GenTraversable[A]]
+// with GenericTraversableTemplate[A, GenTraversable]
+ with Mutable
+{
+ def seq: Traversable[A]
+ //override def companion: GenericCompanion[GenTraversable] = GenTraversable
+}
+
+
+// object GenTraversable extends TraversableFactory[GenTraversable] {
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenTraversable[A]] = new GenericCanBuildFrom[A]
+// def newBuilder[A]: Builder[A, GenTraversable[A]] = Traversable.newBuilder
+// }
+
+
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index cf7b0d423a..bf65966e80 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -33,7 +33,7 @@ import parallel.immutable.ParHashMap
* @define willNotTerminateInf
*/
@SerialVersionUID(2L)
-class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with CustomParallelizable[(A, B), ParHashMap[A, B]] with Serializable {
+class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with Serializable with CustomParallelizable[(A, B), ParHashMap[A, B]] {
override def size: Int = 0
@@ -180,7 +180,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
}
}
- private[collection] class HashMapCollision1[A,+B](private[HashMap] var hash: Int, var kvs: ListMap[A,B @uV]) extends HashMap[A,B] {
+ private[collection] class HashMapCollision1[A, +B](private[HashMap] var hash: Int, var kvs: ListMap[A, B @uV])
+ extends HashMap[A, B @uV] {
+
override def size = kvs.size
override def get0(key: A, hash: Int, level: Int): Option[B] =
@@ -222,8 +224,12 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
}
}
- class HashTrieMap[A,+B](private[HashMap] var bitmap: Int, private[collection] var elems: Array[HashMap[A,B @uV]],
- private[HashMap] var size0: Int) extends HashMap[A,B] {
+ class HashTrieMap[A, +B](
+ private[HashMap] var bitmap: Int,
+ private[collection] var elems: Array[HashMap[A, B @uV]],
+ private[HashMap] var size0: Int
+ ) extends HashMap[A, B @uV] {
+
/*
def this (level: Int, m1: HashMap1[A,B], m2: HashMap1[A,B]) = {
this(((m1.hash >>> level) & 0x1f) | ((m2.hash >>> level) & 0x1f), {
@@ -308,7 +314,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
}
}
- override def iterator: Iterator[(A, B)] = new CovariantTrieIterator[A, B](elems)
+ override def iterator: Iterator[(A, B)] = new TrieIterator[(A, B)](elems.asInstanceOf[Array[Iterable[(A, B)]]]) {
+ final override def getElem(cc: AnyRef): (A, B) = cc.asInstanceOf[HashMap1[A, B]].ensurePair
+ }
/*
@@ -452,35 +460,6 @@ time { mNew.iterator.foreach( p => ()) }
}
}
- class CovariantTrieIterator[A, +B](elems: Array[HashMap[A, B]]) extends Iterator[(A, B)] {
- private[this] val it = new TrieIterator[A, B](elems)
- def next = it.next
- def hasNext = it.hasNext
- }
-
- class TrieIterator[A, B](elems: Array[HashMap[A, B]]) extends TrieIteratorBase[(A, B), HashMap[A, B]](elems) {
- import TrieIteratorBase._
-
- type This = TrieIterator[A, B]
- private[immutable] def recreateIterator() = new TrieIterator(elems)
- private[immutable] type ContainerType = HashMap1[A, B]
- private[immutable] type TrieType = HashTrieMap[A, B]
- private[immutable] type CollisionType = HashMapCollision1[A, B]
-
- private[immutable] def determineType(x: HashMap[A, B]) = x match {
- case _: HashMap1[_, _] => CONTAINER_TYPE
- case _: HashTrieMap[_, _] => TRIE_TYPE
- case _: HashMapCollision1[_, _] => COLLISION_TYPE
- }
-
- private[immutable] def getElem(cc: ContainerType) = cc.ensurePair
- private[immutable] def getElems(t: TrieType) = t.elems
- private[immutable] def collisionToArray(c: CollisionType) = c.kvs map (x => HashMap(x)) toArray
- private[immutable] def newThisType(xs: Array[HashMap[A, B]]) = new TrieIterator(xs)
- private[immutable] def newDeepArray(size: Int) = new Array[Array[HashMap[A, B]]](size)
- private[immutable] def newSingleArray(el: HashMap[A, B]) = Array(el)
- }
-
private def check[K](x: HashMap[K, _], y: HashMap[K, _], xy: HashMap[K, _]) = { // TODO remove this debugging helper
var xs = Set[K]()
for (elem <- x) xs += elem._1
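
The CovariantTrieIterator and per-map TrieIterator classes deleted above are replaced by the shared abstract TrieIterator added later in this patch, which each collection specialises by overriding a single getElem hook (see the new HashMap.iterator). A minimal, hypothetical sketch of that pattern:

// One abstract iterator; only the element-extraction step differs per collection.
abstract class LeafIterator[+T](leaves: Array[AnyRef]) extends Iterator[T] {
  private[this] var i = 0
  protected def getElem(leaf: AnyRef): T             // the per-collection hook
  def hasNext: Boolean = i < leaves.length
  def next(): T = { val t = getElem(leaves(i)); i += 1; t }
}

object LeafIteratorDemo {
  final case class Entry(key: String, value: Int)
  // Use site: supply the extraction logic anonymously, much as HashMap/HashSet do.
  val values: Iterator[Int] =
    new LeafIterator[Int](Array[AnyRef](Entry("a", 1), Entry("b", 2))) {
      override def getElem(leaf: AnyRef): Int = leaf.asInstanceOf[Entry].value
    }
  // values.toList == List(1, 2)
}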
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 951f6d235e..c4b0c65d99 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -11,6 +11,7 @@
package scala.collection
package immutable
+import annotation.unchecked.{ uncheckedVariance => uV }
import generic._
import collection.parallel.immutable.ParHashSet
@@ -95,34 +96,11 @@ class HashSet[A] extends Set[A]
*/
object HashSet extends ImmutableSetFactory[HashSet] {
- class TrieIterator[A](elems: Array[HashSet[A]]) extends TrieIteratorBase[A, HashSet[A]](elems) {
- import TrieIteratorBase._
-
- type This = TrieIterator[A]
-
- private[immutable] def recreateIterator() = new TrieIterator(elems)
- private[immutable] type ContainerType = HashSet1[A]
- private[immutable] type TrieType = HashTrieSet[A]
- private[immutable] type CollisionType = HashSetCollision1[A]
- private[immutable] def determineType(x: HashSet[A]) = x match {
- case _: HashSet1[_] => CONTAINER_TYPE
- case _: HashTrieSet[_] => TRIE_TYPE
- case _: HashSetCollision1[_] => COLLISION_TYPE
- }
- private[immutable] def getElem(cc: ContainerType): A = cc.key
- private[immutable] def getElems(t: TrieType) = t.elems
- private[immutable] def collisionToArray(c: CollisionType) = c.ks map (x => HashSet(x)) toArray
- private[immutable] def newThisType(xs: Array[HashSet[A]]) = new TrieIterator(xs)
- private[immutable] def newDeepArray(size: Int) = new Array[Array[HashSet[A]]](size)
- private[immutable] def newSingleArray(el: HashSet[A]) = Array(el)
- }
-
/** $setCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A]
override def empty[A]: HashSet[A] = EmptyHashSet.asInstanceOf[HashSet[A]]
- private object EmptyHashSet extends HashSet[Any] {
- }
+ private object EmptyHashSet extends HashSet[Any] { }
// TODO: add HashSet2, HashSet3, ...
@@ -152,7 +130,9 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def foreach[U](f: A => U): Unit = f(key)
}
- private[immutable] class HashSetCollision1[A](private[HashSet] var hash: Int, var ks: ListSet[A]) extends HashSet[A] {
+ private[immutable] class HashSetCollision1[A](private[HashSet] var hash: Int, var ks: ListSet[A])
+ extends HashSet[A] {
+
override def size = ks.size
override def get0(key: A, hash: Int, level: Int): Boolean =
@@ -197,9 +177,8 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
-
- class HashTrieSet[A](private var bitmap: Int, private[HashSet] var elems: Array[HashSet[A]],
- private var size0: Int) extends HashSet[A] {
+ class HashTrieSet[A](private var bitmap: Int, private[collection] var elems: Array[HashSet[A]], private var size0: Int)
+ extends HashSet[A] {
override def size = size0
@@ -268,8 +247,9 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def iterator = new TrieIterator[A](elems)
-
+ override def iterator = new TrieIterator[A](elems.asInstanceOf[Array[Iterable[A]]]) {
+ final override def getElem(cc: AnyRef): A = cc.asInstanceOf[HashSet1[A]].key
+ }
/*
def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) }
diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala
index 7f5cb055f4..81579ef3b7 100644
--- a/src/library/scala/collection/immutable/Iterable.scala
+++ b/src/library/scala/collection/immutable/Iterable.scala
@@ -22,12 +22,15 @@ import parallel.immutable.ParIterable
* @define coll immutable iterable collection
*/
trait Iterable[+A] extends Traversable[A]
+// with GenIterable[A]
with scala.collection.Iterable[A]
with GenericTraversableTemplate[A, Iterable]
with IterableLike[A, Iterable[A]]
- with Parallelizable[A, ParIterable[A]] {
+ with Parallelizable[A, ParIterable[A]]
+{
override def companion: GenericCompanion[Iterable] = Iterable
protected[this] override def parCombiner = ParIterable.newCombiner[A] // if `immutable.IterableLike` gets introduced, please move this there!
+ override def seq: Iterable[A] = this
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 84b62e7e29..c14e24ab47 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -135,9 +135,9 @@ sealed abstract class List[+A] extends LinearSeq[A]
// Overridden methods from IterableLike and SeqLike or overloaded variants of such methods
- override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+ override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
val b = bf(this)
- if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.toList).asInstanceOf[That]
+ if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.seq.toList).asInstanceOf[That]
else super.++(that)
}
@@ -241,10 +241,10 @@ sealed abstract class List[+A] extends LinearSeq[A]
/** Like <code>span</code> but with the predicate inverted.
*/
- @deprecated("use `span { x => !p(x) }` instead")
+ @deprecated("use `span { x => !p(x) }` instead", "2.8.0")
def break(p: A => Boolean): (List[A], List[A]) = span { x => !p(x) }
- @deprecated("use `filterNot' instead")
+ @deprecated("use `filterNot' instead", "2.8.0")
def remove(p: A => Boolean): List[A] = filterNot(p)
/** Computes the difference between this list and the given list
@@ -254,7 +254,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
* @return this list without the elements of the given list
* `that`.
*/
- @deprecated("use `list1 filterNot (list2 contains)` instead")
+ @deprecated("use `list1 filterNot (list2 contains)` instead", "2.8.0")
def -- [B >: A](that: List[B]): List[B] = {
val b = new ListBuffer[B]
var these = this
@@ -272,7 +272,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
* @return this list without occurrences of the given object
* `x`.
*/
- @deprecated("use `filterNot (_ == x)` instead")
+ @deprecated("use `filterNot (_ == x)` instead", "2.8.0")
def - [B >: A](x: B): List[B] = {
val b = new ListBuffer[B]
var these = this
@@ -283,7 +283,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
b.toList
}
- @deprecated("use `distinct' instead")
+ @deprecated("use `distinct' instead", "2.8.0")
def removeDuplicates: List[A] = distinct
/** <p>
@@ -302,7 +302,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
* .sort((e1, e2) => (e1 compareTo e2) &lt; 0) =
* List("Bob", "John", "Steve", "Tom")</pre>
*/
- @deprecated("use `sortWith' instead")
+ @deprecated("use `sortWith' instead", "2.8.0")
def sort(lt : (A,A) => Boolean): List[A] = {
/** Merge two already-sorted lists */
def merge(l1: List[A], l2: List[A]): List[A] = {
@@ -456,7 +456,7 @@ object List extends SeqFactory[List] {
* or decreasing.
* @return the sorted list of all integers in range [start;end).
*/
- @deprecated("use `iterate' instead")
+ @deprecated("use `iterate' instead", "2.8.0")
def range(start: Int, end: Int, step: Int => Int): List[Int] = {
val up = step(start) > start
val down = step(start) < start
@@ -478,7 +478,7 @@ object List extends SeqFactory[List] {
* @param elem the element composing the resulting list
* @return a list composed of n elements all equal to elem
*/
- @deprecated("use `fill' instead")
+ @deprecated("use `fill' instead", "2.8.0")
def make[A](n: Int, elem: A): List[A] = {
val b = new ListBuffer[A]
var i = 0
@@ -494,7 +494,7 @@ object List extends SeqFactory[List] {
* @param xss the list of lists that are to be concatenated
* @return the concatenation of all the lists
*/
- @deprecated("use `xss.flatten' instead of `List.flatten(xss)'")
+ @deprecated("use `xss.flatten' instead of `List.flatten(xss)'", "2.8.0")
def flatten[A](xss: List[List[A]]): List[A] = {
val b = new ListBuffer[A]
for (xs <- xss) {
@@ -512,7 +512,7 @@ object List extends SeqFactory[List] {
* @param xs the list of pairs to unzip
* @return a pair of lists.
*/
- @deprecated("use `xs.unzip' instead of `List.unzip(xs)'")
+ @deprecated("use `xs.unzip' instead of `List.unzip(xs)'", "2.8.0")
def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = {
val b1 = new ListBuffer[A]
val b2 = new ListBuffer[B]
@@ -530,7 +530,7 @@ object List extends SeqFactory[List] {
* @param xs the iterable of pairs to unzip
* @return a pair of lists.
*/
- @deprecated("use `xs.unzip' instead of `List.unzip(xs)'")
+ @deprecated("use `xs.unzip' instead of `List.unzip(xs)'", "2.8.0")
def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
@@ -540,7 +540,7 @@ object List extends SeqFactory[List] {
* Returns the `Left` values in the given `Iterable`
* of `Either`s.
*/
- @deprecated("use `xs collect { case Left(x: A) => x }' instead of `List.lefts(xs)'")
+ @deprecated("use `xs collect { case Left(x: A) => x }' instead of `List.lefts(xs)'", "2.8.0")
def lefts[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[A]](Nil)((e, as) => e match {
case Left(a) => a :: as
@@ -550,7 +550,7 @@ object List extends SeqFactory[List] {
/**
* Returns the `Right` values in the given`Iterable` of `Either`s.
*/
- @deprecated("use `xs collect { case Right(x: B) => x }' instead of `List.rights(xs)'")
+ @deprecated("use `xs collect { case Right(x: B) => x }' instead of `List.rights(xs)'", "2.8.0")
def rights[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[B]](Nil)((e, bs) => e match {
case Left(_) => bs
@@ -562,7 +562,7 @@ object List extends SeqFactory[List] {
* @param xs the iterable of Eithers to separate
* @return a pair of lists.
*/
- @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead")
+ @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead", "2.8.0")
def separate[A,B](es: Iterable[Either[A, B]]): (List[A], List[B]) =
es.foldRight[(List[A], List[B])]((Nil, Nil)) {
case (Left(a), (lefts, rights)) => (a :: lefts, rights)
@@ -575,7 +575,7 @@ object List extends SeqFactory[List] {
* @return a list that contains the elements returned by successive
* calls to `it.next`
*/
- @deprecated("use `it.toList' instead of `List.toList(it)'")
+ @deprecated("use `it.toList' instead of `List.toList(it)'", "2.8.0")
def fromIterator[A](it: Iterator[A]): List[A] = it.toList
/** Converts an array into a list.
@@ -584,7 +584,7 @@ object List extends SeqFactory[List] {
* @return a list that contains the same elements than `arr`
* in the same order
*/
- @deprecated("use `array.toList' instead of `List.fromArray(array)'")
+ @deprecated("use `array.toList' instead of `List.fromArray(array)'", "2.8.0")
def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length)
/** Converts a range of an array into a list.
@@ -595,7 +595,7 @@ object List extends SeqFactory[List] {
* @return a list that contains the same elements than `arr`
* in the same order
*/
- @deprecated("use `array.view(start, end).toList' instead of `List.fromArray(array, start, end)'")
+ @deprecated("use `array.view(start, end).toList' instead of `List.fromArray(array, start, end)'", "2.8.0")
def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = {
var res: List[A] = Nil
var i = start + len
@@ -613,7 +613,7 @@ object List extends SeqFactory[List] {
* @param separator the separator character
* @return the list of substrings
*/
- @deprecated("use `str.split(separator).toList' instead of `List.fromString(str, separator)'")
+ @deprecated("use `str.split(separator).toList' instead of `List.fromString(str, separator)'", "2.8.0")
def fromString(str: String, separator: Char): List[String] = {
var words: List[String] = Nil
var pos = str.length()
@@ -631,7 +631,7 @@ object List extends SeqFactory[List] {
* @param xs the list to convert.
* @return the list in form of a string.
*/
- @deprecated("use `xs.mkString' instead of `List.toString(xs)'")
+ @deprecated("use `xs.mkString' instead of `List.toString(xs)'", "2.8.0")
def toString(xs: List[Char]): String = {
val sb = new StringBuilder()
var xc = xs
@@ -645,7 +645,7 @@ object List extends SeqFactory[List] {
/** Like xs map f, but returns `xs` unchanged if function
* `f` maps all elements to themselves.
*/
- @deprecated("use `xs.mapConserve(f)' instead of `List.mapConserve(xs, f)'")
+ @deprecated("use `xs.mapConserve(f)' instead of `List.mapConserve(xs, f)'", "2.8.0")
def mapConserve[A <: AnyRef](xs: List[A])(f: A => A): List[A] = {
def loop(ys: List[A]): List[A] =
if (ys.isEmpty) xs
@@ -679,7 +679,7 @@ object List extends SeqFactory[List] {
* `[a0, ..., ak]`, `[b0, ..., bl]` and
* `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).zipped.map(f)' instead of `List.map2(xs, ys)(f)'")
+ @deprecated("use `(xs, ys).zipped.map(f)' instead of `List.map2(xs, ys)(f)'", "2.8.0")
def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
val b = new ListBuffer[C]
var xc = xs
@@ -703,7 +703,7 @@ object List extends SeqFactory[List] {
* `[c<sub>0</sub>, ..., c<sub>m</sub>]` and
* `n = min(k,l,m)`
*/
- @deprecated("use `(xs, ys, zs).zipped.map(f)' instead of `List.map3(xs, ys, zs)(f)'")
+ @deprecated("use `(xs, ys, zs).zipped.map(f)' instead of `List.map3(xs, ys, zs)(f)'", "2.8.0")
def map3[A,B,C,D](xs: List[A], ys: List[B], zs: List[C])(f: (A, B, C) => D): List[D] = {
val b = new ListBuffer[D]
var xc = xs
@@ -728,7 +728,7 @@ object List extends SeqFactory[List] {
* `[b<sub>0</sub>, ..., b<sub>l</sub>]`
* and `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).zipped.forall(f)' instead of `List.forall2(xs, ys)(f)'")
+ @deprecated("use `(xs, ys).zipped.forall(f)' instead of `List.forall2(xs, ys)(f)'", "2.8.0")
def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
@@ -750,7 +750,7 @@ object List extends SeqFactory[List] {
* `[b<sub>0</sub>, ..., b<sub>l</sub>]` and
* `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).zipped.exists(f)' instead of `List.exists2(xs, ys)(f)'")
+ @deprecated("use `(xs, ys).zipped.exists(f)' instead of `List.exists2(xs, ys)(f)'", "2.8.0")
def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
@@ -768,7 +768,7 @@ object List extends SeqFactory[List] {
* @param xss the list of lists
* @return the transposed list of lists
*/
- @deprecated("use `xss.transpose' instead of `List.transpose(xss)'")
+ @deprecated("use `xss.transpose' instead of `List.transpose(xss)'", "2.8.0")
def transpose[A](xss: List[List[A]]): List[List[A]] = {
val buf = new ListBuffer[List[A]]
var yss = xss
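
Most of the List.scala hunks above (and the ListMap, Map, Queue, RedBlack, Stack and Stream hunks that follow) migrate @deprecated from the single-argument form to the two-argument form that also records the version in which the deprecation was introduced. A minimal sketch with hypothetical names:

object DeprecationDemo {
  def renamedThing: Int = 42

  @deprecated("use `renamedThing` instead", "2.8.0")   // message plus since-version
  def oldThing: Int = renamedThing
}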
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 660e78040e..a0f20c6e96 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -97,8 +97,8 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Se
*
* @param xs the traversable object.
*/
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): ListMap[A, B1] =
- ((repr: ListMap[A, B1]) /: xs) (_ + _)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] =
+ ((repr: ListMap[A, B1]) /: xs.seq) (_ + _)
/** This creates a new mapping without the given <code>key</code>.
* If the map does not contain a mapping for the given key, the
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 0f040b157b..057b68d280 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -99,9 +99,9 @@ class ListSet[A] extends Set[A]
* so we take the easy way out and add ourselves and the argument to
* a new builder.
*/
- override def ++(xs: TraversableOnce[A]): ListSet[A] =
+ override def ++(xs: GenTraversableOnce[A]): ListSet[A] =
if (xs.isEmpty) this
- else new ListSet.ListSetBuilder(this) ++= xs result
+ else new ListSet.ListSetBuilder(this) ++= xs.seq result
private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e)
private[ListSet] def unchecked_outer: ListSet[A] =
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 2c7c1023ed..df1cfa80cd 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -26,6 +26,7 @@ import generic._
* @since 1
*/
trait Map[A, +B] extends Iterable[(A, B)]
+// with GenMap[A, B]
with scala.collection.Map[A, B]
with MapLike[A, B, Map[A, B]] { self =>
@@ -92,7 +93,7 @@ object Map extends ImmutableMapFactory[Map] {
def - (key: Any): Map[Any, Nothing] = this
}
- @deprecated("use `Map.empty' instead")
+ @deprecated("use `Map.empty' instead", "2.8.0")
class EmptyMap[A,B] extends Map[A,B] with Serializable {
override def size: Int = 0
def get(key: A): Option[B] = None
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 13054b9f83..d22adc03bc 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -82,8 +82,8 @@ trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param xs the traversable object consisting of key-value pairs.
* @return a new immutable map with the bindings of this map and those from `xs`.
*/
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): immutable.Map[A, B1] =
- ((repr: immutable.Map[A, B1]) /: xs) (_ + _)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] =
+ ((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _)
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala
index 29beecdcc9..b08b3c9664 100644
--- a/src/library/scala/collection/immutable/MapProxy.scala
+++ b/src/library/scala/collection/immutable/MapProxy.scala
@@ -33,7 +33,7 @@ trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
override def -(key: A) = newProxy(self - key)
override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = newProxy(self + kv)
override def + [B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*))
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]) = newProxy(self ++ xs)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]) = newProxy(self ++ xs.seq)
override def keySet: immutable.Set[A] = new SetProxy[A] { val self = MapProxy.this.self.keySet }
override def filterKeys(p: A => Boolean) = self.filterKeys(p)
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index b9d8e38701..bf5dc6816a 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -81,7 +81,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
*
* @param elem the element to insert
*/
- @deprecated("Use the method <code>enqueue</code> from now on.")
+ @deprecated("Use `enqueue` instead", "2.7.2")
def +[B >: A](elem: B) = enqueue(elem)
/** Creates a new queue with element added at the end
@@ -99,7 +99,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
*
* @param iter an iterable object
*/
- @deprecated("Use the method <code>enqueue</code> from now on.")
+ @deprecated("Use `enqueue` instead", "2.7.2")
def +[B >: A](iter: Iterable[B]) = enqueue(iter)
/** Returns a new queue with all elements provided by
@@ -151,6 +151,6 @@ object Queue extends SeqFactory[Queue] {
private object EmptyQueue extends Queue[Nothing](Nil, Nil) { }
- @deprecated("Use Queue.empty instead")
+ @deprecated("Use Queue.empty instead", "2.8.0")
val Empty: Queue[Nothing] = Queue()
}
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 435959a645..2a5ba9839f 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -246,7 +246,7 @@ object Range {
NumericRange.count[Long](start, end, step, isInclusive)
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
- override def par = new ParRange(this)
+// override def par = new ParRange(this)
override def isInclusive = true
override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step)
}
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index c83fd3a21b..4bea8fbaf1 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -35,11 +35,12 @@ abstract class RedBlack[A] extends Serializable {
def delete(k: A): Tree[B] = blacken(del(k))
def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until))
def foreach[U](f: (A, B) => U)
- @deprecated("use `foreach' instead")
+ @deprecated("use `foreach' instead", "2.8.0")
def visit[T](input: T)(f: (T, A, B) => (Boolean, T)): (Boolean, T)
def toStream: Stream[(A,B)]
def iterator: Iterator[(A, B)]
- @deprecated("use `iterator' instead") def elements = iterator
+ @deprecated("use `iterator' instead", "2.8.0")
+ def elements = iterator
def upd[B1 >: B](k: A, v: B1): Tree[B1]
def del(k: A): Tree[B]
def smallest: NonEmpty[B]
@@ -164,7 +165,7 @@ abstract class RedBlack[A] extends Serializable {
right foreach f
}
- @deprecated("use `foreach' instead")
+ @deprecated("use `foreach' instead", "2.8.0")
def visit[T](input: T)(f: (T,A,B) => (Boolean, T)): (Boolean, T) = {
val left = this.left.visit(input)(f)
if (!left._1) return left
@@ -280,7 +281,7 @@ abstract class RedBlack[A] extends Serializable {
def foreach[U](f: (A, Nothing) => U) {}
- @deprecated("use `foreach' instead")
+ @deprecated("use `foreach' instead", "2.8.0")
def visit[T](input: T)(f: (T, A, Nothing) => (Boolean, T)) = (true, input)
def rng(from: Option[A], until: Option[A]) = this
diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala
index 0a370615fc..11e56df354 100644
--- a/src/library/scala/collection/immutable/Seq.scala
+++ b/src/library/scala/collection/immutable/Seq.scala
@@ -23,14 +23,16 @@ import parallel.immutable.ParSeq
* @define coll immutable sequence
*/
trait Seq[+A] extends Iterable[A]
+// with GenSeq[A]
with scala.collection.Seq[A]
with GenericTraversableTemplate[A, Seq]
with SeqLike[A, Seq[A]]
- with Parallelizable[A, ParSeq[A]] {
+ with Parallelizable[A, ParSeq[A]]
+{
override def companion: GenericCompanion[Seq] = Seq
override def toSeq: Seq[A] = this
- protected[this] override def parCombiner = ParSeq.newCombiner[A] // if `immutable.SeqLike` gets introduced, please move this there!
override def seq: Seq[A] = this
+ protected[this] override def parCombiner = ParSeq.newCombiner[A] // if `immutable.SeqLike` gets introduced, please move this there!
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index 33bc50ab56..ce2b3b1885 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -25,14 +25,16 @@ import parallel.immutable.ParSet
* @define coll immutable set
*/
trait Set[A] extends Iterable[A]
+// with GenSet[A]
with scala.collection.Set[A]
with GenericSetTemplate[A, Set]
with SetLike[A, Set[A]]
- with Parallelizable[A, ParSet[A]] {
+ with Parallelizable[A, ParSet[A]]
+{
override def companion: GenericCompanion[Set] = Set
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
- protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there!
override def seq: Set[A] = this
+ protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there!
}
/** $factoryInfo
@@ -56,7 +58,7 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: Any => U): Unit = {}
}
- @deprecated("use `Set.empty' instead")
+ @deprecated("use `Set.empty' instead", "2.8.0")
class EmptySet[A] extends Set[A] with Serializable {
override def size: Int = 0
def contains(elem: A): Boolean = false
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 85798f9cc9..64fa06bf2e 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -61,8 +61,8 @@ trait SortedMap[A, +B] extends Map[A, B]
*
* @param xs the traversable object.
*/
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): SortedMap[A, B1] =
- ((repr: SortedMap[A, B1]) /: xs) (_ + _)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index 210022f197..329f12190f 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -23,7 +23,7 @@ object Stack extends SeqFactory[Stack] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, Stack[A]] = new ArrayBuffer[A] mapResult (buf => new Stack(buf.toList))
- @deprecated("Use Stack.empty instead")
+ @deprecated("Use Stack.empty instead", "2.8.0")
val Empty: Stack[Nothing] = Stack()
}
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 2a4c29c05e..b6145ecaf0 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -138,7 +138,7 @@ self =>
* then StreamBuilder will be chosen for the implicit.
* we recognize that fact and optimize to get more laziness.
*/
- override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[A]
if (isStreamBuilder(bf)) asThat(
if (isEmpty) that.toStream
@@ -197,7 +197,7 @@ self =>
* @return <code>f(a<sub>0</sub>) ::: ... ::: f(a<sub>n</sub>)</code> if
* this stream is <code>[a<sub>0</sub>, ..., a<sub>n</sub>]</code>.
*/
- override final def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ override final def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[B]
// optimisations are not for speed, but for functionality
// see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala)
@@ -251,7 +251,7 @@ self =>
else super.map(f)(bf)
}
- override def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
+ override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
def tailFlatMap = asStream[B](tail withFilter p flatMap f)
if (isStreamBuilder(bf)) asThat(
if (isEmpty) Stream.Empty
@@ -332,7 +332,7 @@ self =>
* <code>Stream(a<sub>0</sub>, ..., a<sub>m</sub>)
* zip Stream(b<sub>0</sub>, ..., b<sub>n</sub>)</code> is invoked.
*/
- override final def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
+ override final def zip[A1 >: A, B, That](that: collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[(A1, B)]
if (isStreamBuilder(bf)) asThat(
if (this.isEmpty || that.isEmpty) Stream.Empty
@@ -600,7 +600,8 @@ object Stream extends SeqFactory[Stream] {
else Some((xs.head, xs.tail))
}
- @deprecated("use #:: instead") lazy val lazy_:: = #::
+ @deprecated("use #:: instead", "2.8.0")
+ lazy val lazy_:: = #::
/** An alternative way of building and matching Streams using Stream.cons(hd, tl).
*/
@@ -701,17 +702,17 @@ object Stream extends SeqFactory[Stream] {
/** A stream containing all elements of a given iterator, in the order they are produced.
* @param it The iterator producing the stream's elements
*/
- @deprecated("use it.toStream instead")
+ @deprecated("use it.toStream instead", "2.8.0")
def fromIterator[A](it: Iterator[A]): Stream[A] = it.toStream
/** The concatenation of a sequence of streams
*/
- @deprecated("use xs.flatten instead")
+ @deprecated("use xs.flatten instead", "2.8.0")
def concat[A](xs: Iterable[Stream[A]]): Stream[A] = concat(xs.iterator)
/** The concatenation of all streams returned by an iterator
*/
- @deprecated("use xs.toStream.flatten instead")
+ @deprecated("use xs.toStream.flatten instead", "2.8.0")
def concat[A](xs: Iterator[Stream[A]]): Stream[A] = xs.toStream.flatten //(conforms[Stream[A], scala.collection.Traversable[A]])
/**
@@ -725,7 +726,7 @@ object Stream extends SeqFactory[Stream] {
* @param step the increment function of the stream, must be monotonically increasing or decreasing
* @return the stream starting at value <code>start</code>.
*/
- @deprecated("use `iterate' instead.")
+ @deprecated("use `iterate' instead.", "2.8.0")
def range(start: Int, end: Int, step: Int => Int): Stream[Int] =
iterate(start, end - start)(step)
@@ -735,7 +736,7 @@ object Stream extends SeqFactory[Stream] {
* @param elem the element composing the resulting stream
* @return the stream containing an infinite number of elem
*/
- @deprecated("use `continually' instead")
+ @deprecated("use `continually' instead", "2.8.0")
def const[A](elem: A): Stream[A] = cons(elem, const(elem))
/** Create a stream containing several copies of an element.
@@ -744,7 +745,7 @@ object Stream extends SeqFactory[Stream] {
* @param elem the element composing the resulting stream
* @return the stream composed of n elements all equal to elem
*/
- @deprecated("use fill(n, elem) instead")
+ @deprecated("use fill(n, elem) instead", "2.8.0")
def make[A](n: Int, elem: A): Stream[A] = fill(n)(elem)
}
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index 31b9284a86..7c44c1e019 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -47,20 +47,20 @@ extends SeqView[A, Coll]
trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B]
/** boilerplate */
- protected override def newForced[B](xs: => collection.Seq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected override def newAppended[B >: A](that: collection.Traversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
+ protected override def newForced[B](xs: => collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
+ protected override def newAppended[B >: A](that: collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected override def newFlatMapped[B](f: A => collection.TraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
- protected override def newZipped[B](that: collection.Iterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
- protected override def newZippedAll[A1 >: A, B](that: collection.Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
+ protected override def newZipped[B](that: collection.GenIterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
+ protected override def newZippedAll[A1 >: A, B](that: collection.GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
new { val other = that; val thisElem = _thisElem; val thatElem = _thatElem } with ZippedAll[A1, B]
}
protected override def newReversed: Transformed[A] = new Reversed { }
- protected override def newPatched[B >: A](_from: Int, _patch: collection.Seq[B], _replaced: Int): Transformed[B] = {
+ protected override def newPatched[B >: A](_from: Int, _patch: collection.GenSeq[B], _replaced: Int): Transformed[B] = {
new { val from = _from; val patch = _patch; val replaced = _replaced } with Patched[B]
}
protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with Prepended[B]
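
The StreamViewLike hunk only widens parameter types to the Gen* variants; it keeps Scala 2's early-initializer syntax (`new { val x = ... } with SomeTrait`), which binds the abstract val before the trait body runs. A minimal, hypothetical sketch of why that ordering matters (Scala 2 only; the syntax was later deprecated and dropped in Scala 3):

trait Shouted {
  val word: String
  val shouted: String = word.toUpperCase   // evaluated when the trait initialises
}

object EarlyInitDemo {
  val ok = new { val word = "stream" } with Shouted    // ok.shouted == "STREAM"
  // new Shouted { val word = "stream" } would throw a NullPointerException instead:
  // `word` is still null when `shouted` is computed, which is the usual reason for
  // early definitions like those in the view code above.
}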
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index 06a62c2604..63d5984b11 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -37,4 +37,6 @@ final class StringOps(override val repr: String) extends StringLike[String] {
override protected[this] def newBuilder = StringBuilder.newBuilder
override def toString = repr
+
+ def seq = this.iterator
}
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 32c19ab1cd..80839db31c 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -19,10 +19,12 @@ import mutable.Builder
* @define mutability immutable
*/
trait Traversable[+A] extends scala.collection.Traversable[A]
+// with GenTraversable[A]
with GenericTraversableTemplate[A, Traversable]
with TraversableLike[A, Traversable[A]]
with Immutable {
override def companion: GenericCompanion[Traversable] = Traversable
+ override def seq: Traversable[A] = this
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 5543e1cba3..7552e1983c 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -110,8 +110,8 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
*
* @param xs the traversable object.
*/
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): TreeMap[A, B1] =
- ((repr: TreeMap[A, B1]) /: xs) (_ + _)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): TreeMap[A, B1] =
+ ((repr: TreeMap[A, B1]) /: xs.seq) (_ + _)
/** A new TreeMap with the entry added is returned,
* assuming that key is <em>not</em> in the TreeMap.
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
new file mode 100644
index 0000000000..088b280b8a
--- /dev/null
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -0,0 +1,219 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package immutable
+
+import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 }
+import HashSet.{ HashTrieSet, HashSetCollision1, HashSet1 }
+import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.tailrec
+
+/** Abandons any pretense of type safety for speed. You can't say I
+ * didn't try: see r23934.
+ */
+private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) extends Iterator[T] {
+ outer =>
+
+ private[immutable] def getElem(x: AnyRef): T
+
+ def initDepth = 0
+ def initArrayStack: Array[Array[Iterable[T @uV]]] = new Array[Array[Iterable[T]]](6)
+ def initPosStack = new Array[Int](6)
+ def initArrayD: Array[Iterable[T @uV]] = elems
+ def initPosD = 0
+ def initSubIter: Iterator[T] = null // to traverse collision nodes
+
+ private[this] var depth = initDepth
+ private[this] var arrayStack: Array[Array[Iterable[T @uV]]] = initArrayStack
+ private[this] var posStack = initPosStack
+ private[this] var arrayD: Array[Iterable[T @uV]] = initArrayD
+ private[this] var posD = initPosD
+ private[this] var subIter = initSubIter
+
+ private[this] def getElems(x: Iterable[T]): Array[Iterable[T]] = (x match {
+ case x: HashTrieMap[a, b] => x.elems
+ case x: HashTrieSet[T] => x.elems
+ }).asInstanceOf[Array[Iterable[T]]]
+
+ private[this] def collisionToArray(x: Iterable[T]): Array[Iterable[T]] = (x match {
+ case x: HashMapCollision1[_, _] => x.kvs.map(x => HashMap(x)).toArray
+ case x: HashSetCollision1[_] => x.ks.map(x => HashSet(x)).toArray
+ }).asInstanceOf[Array[Iterable[T]]]
+
+ private type SplitIterators = ((Iterator[T], Int), Iterator[T])
+
+ private def isTrie(x: AnyRef) = x match {
+ case _: HashTrieMap[_,_] | _: HashTrieSet[_] => true
+ case _ => false
+ }
+ private def isContainer(x: AnyRef) = x match {
+ case _: HashMap1[_, _] | _: HashSet1[_] => true
+ case _ => false
+ }
+
+ final class DupIterator(xs: Array[Iterable[T]]) extends {
+ override val initDepth = outer.depth
+ override val initArrayStack: Array[Array[Iterable[T @uV]]] = outer.arrayStack
+ override val initPosStack = outer.posStack
+ override val initArrayD: Array[Iterable[T @uV]] = outer.arrayD
+ override val initPosD = outer.posD
+ override val initSubIter = outer.subIter
+ } with TrieIterator[T](xs) {
+ final override def getElem(x: AnyRef): T = outer.getElem(x)
+ }
+
+ def dupIterator: TrieIterator[T] = new DupIterator(elems)
+
+ private[this] def newIterator(xs: Array[Iterable[T]]) = new TrieIterator(xs) {
+ final override def getElem(x: AnyRef): T = outer.getElem(x)
+ }
+
+ private[this] def iteratorWithSize(arr: Array[Iterable[T]]): (Iterator[T], Int) =
+ (newIterator(arr), arr map (_.size) sum)
+
+ private[this] def arrayToIterators(arr: Array[Iterable[T]]): SplitIterators = {
+ val (fst, snd) = arr.splitAt(arr.length / 2)
+
+ (iteratorWithSize(snd), newIterator(fst))
+ }
+ private[this] def splitArray(ad: Array[Iterable[T]]): SplitIterators =
+ if (ad.length > 1) arrayToIterators(ad)
+ else ad(0) match {
+ case _: HashMapCollision1[_, _] | _: HashSetCollision1[_] =>
+ arrayToIterators(collisionToArray(ad(0)))
+ case _ =>
+ splitArray(getElems(ad(0)))
+ }
+
+ def hasNext = (subIter ne null) || depth >= 0
+ def next: T = {
+ if (subIter ne null) {
+ val el = subIter.next
+ if (!subIter.hasNext)
+ subIter = null
+ el
+ } else
+ next0(arrayD, posD)
+ }
+
+ @tailrec private[this] def next0(elems: Array[Iterable[T]], i: Int): T = {
+ if (i == elems.length-1) { // reached end of level, pop stack
+ depth -= 1
+ if (depth >= 0) {
+ arrayD = arrayStack(depth)
+ posD = posStack(depth)
+ arrayStack(depth) = null
+ } else {
+ arrayD = null
+ posD = 0
+ }
+ } else
+ posD += 1
+
+ val m = elems(i)
+
+ // Note: this block is over twice as fast written this way as it is
+ // as a pattern match. Haven't started looking into why that is, but
+ // it's pretty sad the pattern matcher is that much slower.
+ if (isContainer(m))
+ getElem(m) // push current pos onto stack and descend
+ else if (isTrie(m)) {
+ if (depth >= 0) {
+ arrayStack(depth) = arrayD
+ posStack(depth) = posD
+ }
+ depth += 1
+ arrayD = getElems(m)
+ posD = 0
+ next0(getElems(m), 0)
+ }
+ else {
+ subIter = m.iterator
+ next
+ }
+ // The much slower version:
+ //
+ // m match {
+ // case _: HashMap1[_, _] | _: HashSet1[_] =>
+ // getElem(m) // push current pos onto stack and descend
+ // case _: HashTrieMap[_,_] | _: HashTrieSet[_] =>
+ // if (depth >= 0) {
+ // arrayStack(depth) = arrayD
+ // posStack(depth) = posD
+ // }
+ // depth += 1
+ // arrayD = getElems(m)
+ // posD = 0
+ // next0(getElems(m), 0)
+ // case _ =>
+ // subIter = m.iterator
+ // next
+ // }
+ }
+
+ // assumption: contains 2 or more elements
+ // splits this iterator into 2 iterators
+ // returns the 1st iterator, its number of elements, and the second iterator
+ def split: SplitIterators = {
+ // 0) simple case: no elements have been iterated - simply divide arrayD
+ if (arrayD != null && depth == 0 && posD == 0)
+ return splitArray(arrayD)
+
+ // otherwise, some elements have been iterated over
+ // 1) collision case: if we have a subIter, we return subIter and elements after it
+ if (subIter ne null) {
+ val buff = subIter.toBuffer
+ subIter = null
+ ((buff.iterator, buff.length), this)
+ }
+ else {
+ // otherwise find the topmost array stack element
+ if (depth > 0) {
+ // 2) topmost comes before (is not) arrayD
+ // steal a portion of top to create a new iterator
+ val topmost = arrayStack(0)
+ if (posStack(0) == arrayStack(0).length - 1) {
+ // 2a) only a single entry left on top
+ // this means we have to modify this iterator - pop topmost
+ val snd = Array[Iterable[T]](arrayStack(0).last)
+ val szsnd = snd(0).size
+ // modify this - pop
+ depth -= 1
+ 1 until arrayStack.length foreach (i => arrayStack(i - 1) = arrayStack(i))
+ arrayStack(arrayStack.length - 1) = Array[Iterable[T]](null)
+ posStack = posStack.tail ++ Array[Int](0)
+ // we know that `this` is not empty, since it had something on the arrayStack and arrayStack elements are always non-empty
+ ((newIterator(snd), szsnd), this)
+ } else {
+ // 2b) more than a single entry left on top
+ val (fst, snd) = arrayStack(0).splitAt(arrayStack(0).length - (arrayStack(0).length - posStack(0) + 1) / 2)
+ arrayStack(0) = fst
+ (iteratorWithSize(snd), this)
+ }
+ } else {
+ // 3) no topmost element (arrayD is at the top)
+ // steal a portion of it and update this iterator
+ if (posD == arrayD.length - 1) {
+ // 3a) positioned at the last element of arrayD
+ val m = arrayD(posD)
+ arrayToIterators(
+ if (isTrie(m)) getElems(m)
+ else collisionToArray(m)
+ )
+ }
+ else {
+ // 3b) arrayD has more free elements
+ val (fst, snd) = arrayD.splitAt(arrayD.length - (arrayD.length - posD + 1) / 2)
+ arrayD = fst
+ (iteratorWithSize(snd), this)
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
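Aside (not part of the commit): TrieIterator above walks the trie iteratively, keeping the current level in arrayD/posD and the enclosing levels in arrayStack/posStack. A self-contained, hedged sketch of that explicit-stack traversal on a toy node type (Leaf, Branch and LeafIterator are illustrative names, not library code):

    object TrieWalkSketch {
      sealed trait Node
      final case class Leaf(value: Int) extends Node
      final case class Branch(children: Array[Node]) extends Node

      // Iterative depth-first walk with an explicit stack of (array, next index),
      // mirroring the arrayD/posD + arrayStack/posStack bookkeeping above.
      final class LeafIterator(root: Branch) extends Iterator[Int] {
        private var arr: Array[Node] = root.children
        private var pos = 0
        private var stack: List[(Array[Node], Int)] = Nil

        // Pop finished levels and descend into branches until positioned on a
        // leaf, or until the whole tree has been exhausted (arr == null).
        private def settle(): Unit = {
          while (arr != null) {
            if (pos == arr.length) {
              stack match {
                case (a, p) :: rest => arr = a; pos = p; stack = rest
                case Nil            => arr = null; pos = 0
              }
            } else arr(pos) match {
              case Branch(children) =>
                stack = (arr, pos + 1) :: stack // remember where to resume
                arr = children
                pos = 0
              case Leaf(_) => return
            }
          }
        }

        settle()
        def hasNext = arr != null
        def next(): Int = {
          val Leaf(v) = arr(pos)
          pos += 1
          settle()
          v
        }
      }

      def main(args: Array[String]): Unit = {
        val t = Branch(Array(Leaf(1), Branch(Array(Leaf(2), Leaf(3))), Leaf(4)))
        println(new LeafIterator(t).toList) // expected: List(1, 2, 3, 4)
      }
    }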
diff --git a/src/library/scala/collection/immutable/TrieIteratorBase.scala b/src/library/scala/collection/immutable/TrieIteratorBase.scala
deleted file mode 100644
index 02126b456f..0000000000
--- a/src/library/scala/collection/immutable/TrieIteratorBase.scala
+++ /dev/null
@@ -1,184 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package immutable
-
-import annotation.unchecked.uncheckedVariance
-
-object TrieIteratorBase {
- final val TRIE_TYPE = 0
- final val CONTAINER_TYPE = 1
- final val COLLISION_TYPE = 2
-}
-import TrieIteratorBase._
-
-private[immutable] abstract class TrieIteratorBase[+T, CC >: Null <: Iterable[T]](elems: Array[CC]) extends Iterator[T] {
- private[immutable] def recreateIterator(): This
-
- // Since we can't match on abstract types, we call determineType to
- // find out what it is and let the casting gods do the remainder.
- private implicit def fixCC[U <: CC](x: CC): U = x.asInstanceOf[U]
-
- protected var depth = 0
- protected var arrayStack = newDeepArray(6)
- protected var posStack = new Array[Int](6)
- protected var arrayD = elems
- protected var posD = 0
- protected var subIter: Iterator[T @uncheckedVariance] = null // to traverse collision nodes
-
- private[immutable] type TrieType <: CC
- private[immutable] type ContainerType <: CC
- private[immutable] type CollisionType <: CC
-
- // Returns one of the constants defined in TrieIteratorBase to determine type.
- private[immutable] def determineType(x: CC): Int
- private[immutable] def getElem(cc: ContainerType): T
- private[immutable] def getElems(t: TrieType): Array[CC]
- private[immutable] def collisionToArray(c: CollisionType): Array[CC]
- private[immutable] def newThisType(xs: Array[CC]): Iterator[T]
- private[immutable] def newDeepArray(size: Int): Array[Array[CC]]
- private[immutable] def newSingleArray(el: CC): Array[CC]
-
- protected type This <: TrieIteratorBase[T, CC]
- private type SplitIterators = ((Iterator[T], Int), Iterator[T])
-
- def dupIterator: This = {
- val t = recreateIterator()
-
- t.depth = depth
- t.arrayStack = arrayStack
- t.posStack = posStack
- t.arrayD = arrayD
- t.posD = posD
- t.subIter = subIter
-
- t
- }
-
- private def iteratorWithSize(arr: Array[CC]): (Iterator[T], Int) =
- (newThisType(arr), arr map (_.size) sum)
-
- private def arrayToIterators(arr: Array[CC]): SplitIterators = {
- val (fst, snd) = arr.splitAt(arr.length / 2)
-
- (iteratorWithSize(snd), newThisType(fst))
- }
- private def splitArray(ad: Array[CC]): SplitIterators =
- if (ad.length > 1) arrayToIterators(ad)
- else determineType(ad(0)) match {
- case COLLISION_TYPE => arrayToIterators(collisionToArray(ad(0)))
- case TRIE_TYPE => splitArray(getElems(ad(0)))
- }
-
- def hasNext = (subIter ne null) || depth >= 0
- def next: T = {
- if (subIter ne null) {
- val el = subIter.next
- if (!subIter.hasNext)
- subIter = null
- el
- } else
- next0(arrayD, posD)
- }
-
- @scala.annotation.tailrec private[this] def next0(elems: Array[CC], i: Int): T = {
- if (i == elems.length-1) { // reached end of level, pop stack
- depth -= 1
- if (depth >= 0) {
- arrayD = arrayStack(depth)
- posD = posStack(depth)
- arrayStack(depth) = null
- } else {
- arrayD = null
- posD = 0
- }
- } else
- posD += 1
-
- val m = elems(i)
- determineType(m) match {
- case TRIE_TYPE =>
- if (depth >= 0) {
- arrayStack(depth) = arrayD
- posStack(depth) = posD
- }
- depth += 1
- arrayD = getElems(m)
- posD = 0
- next0(getElems(m), 0)
- case CONTAINER_TYPE =>
- getElem(m) // push current pos onto stack and descend
- case _ =>
- subIter = m.iterator
- next
- }
- }
-
- // assumption: contains 2 or more elements
- // splits this iterator into 2 iterators
- // returns the 1st iterator, its number of elements, and the second iterator
- def split: SplitIterators = {
- // 0) simple case: no elements have been iterated - simply divide arrayD
- if (arrayD != null && depth == 0 && posD == 0)
- return splitArray(arrayD)
-
- // otherwise, some elements have been iterated over
- // 1) collision case: if we have a subIter, we return subIter and elements after it
- if (subIter ne null) {
- val buff = subIter.toBuffer
- subIter = null
- ((buff.iterator, buff.length), this)
- }
- else {
- // otherwise find the topmost array stack element
- if (depth > 0) {
- // 2) topmost comes before (is not) arrayD
- // steal a portion of top to create a new iterator
- val topmost = arrayStack(0)
- if (posStack(0) == arrayStack(0).length - 1) {
- // 2a) only a single entry left on top
- // this means we have to modify this iterator - pop topmost
- val snd = newSingleArray(arrayStack(0).last)
- val szsnd = snd(0).size
- // modify this - pop
- depth -= 1
- 1 until arrayStack.length foreach (i => arrayStack(i - 1) = arrayStack(i))
- arrayStack(arrayStack.length - 1) = newSingleArray(null)
- posStack = posStack.tail ++ Array[Int](0)
- // we know that `this` is not empty, since it had something on the arrayStack and arrayStack elements are always non-empty
- ((newThisType(snd), szsnd), this)
- } else {
- // 2b) more than a single entry left on top
- val (fst, snd) = arrayStack(0).splitAt(arrayStack(0).length - (arrayStack(0).length - posStack(0) + 1) / 2)
- arrayStack(0) = fst
- (iteratorWithSize(snd), this)
- }
- } else {
- // 3) no topmost element (arrayD is at the top)
- // steal a portion of it and update this iterator
- if (posD == arrayD.length - 1) {
- // 3a) positioned at the last element of arrayD
- val m = arrayD(posD)
- val arr: Array[CC] = determineType(m) match {
- case COLLISION_TYPE => collisionToArray(m)
- case TRIE_TYPE => getElems(m)
- case _ => sys.error("cannot divide single element")
- }
- arrayToIterators(arr)
- }
- else {
- // 3b) arrayD has more free elements
- val (fst, snd) = arrayD.splitAt(arrayD.length - (arrayD.length - posD + 1) / 2)
- arrayD = fst
- (iteratorWithSize(snd), this)
- }
- }
- }
- }
-}
\ No newline at end of file
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 3078a26411..7a4a9bb18a 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -89,9 +89,9 @@ override def companion: GenericCompanion[Vector] = Vector
// In principle, escape analysis could even remove the iterator/builder allocations and do it
// with local variables exclusively. But we're not quite there yet ...
- @deprecated("this method is experimental and will be removed in a future release")
+ @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
@inline def foreachFast[U](f: A => U): Unit = iterator.foreachFast(f)
- @deprecated("this method is experimental and will be removed in a future release")
+ @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
@inline def mapFast[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
val b = bf(repr)
foreachFast(x => b += f(x))
@@ -195,8 +195,8 @@ override def companion: GenericCompanion[Vector] = Vector
// concat (stub)
- override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
- super.++(that)
+ override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
+ super.++(that.seq)
}
@@ -663,7 +663,7 @@ class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterator[A] w
v
}
- @deprecated("this method is experimental and will be removed in a future release")
+ @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
@inline def foreachFast[U](f: A => U) { while (hasNext) f(next()) }
}
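Aside (not part of the commit): the hunks above add the version argument to @deprecated, recording the release in which a method was deprecated. A hedged sketch of the two-argument form (the method names are illustrative):

    object DeprecatedSketch {
      def renamedOp(x: Int): Int = x + 1

      @deprecated("use renamedOp instead", "2.8.0")
      def oldOp(x: Int): Int = renamedOp(x)

      def main(args: Array[String]): Unit =
        println(oldOp(1)) // compiles, typically with a warning citing the message and "2.8.0"
    }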
diff --git a/src/library/scala/collection/interfaces/IterableMethods.scala b/src/library/scala/collection/interfaces/IterableMethods.scala
index 9cc3e2c1ac..f6941c8961 100644
--- a/src/library/scala/collection/interfaces/IterableMethods.scala
+++ b/src/library/scala/collection/interfaces/IterableMethods.scala
@@ -26,13 +26,13 @@ trait IterableMethods[+A, +This <: IterableLike[A, This] with Iterable[A]] exten
// concrete
def dropRight(n: Int): Iterable[A]
def grouped(size: Int): Iterator[Iterable[A]]
- def sameElements[B >: A](that: Iterable[B]): Boolean
+ def sameElements[B >: A](that: GenIterable[B]): Boolean
def sliding[B >: A](size: Int): Iterator[Iterable[A]]
def sliding[B >: A](size: Int, step: Int): Iterator[Iterable[A]]
def takeRight(n: Int): Iterable[A]
- def zipAll[B, A1 >: A, That](that: Iterable[B], e1: A1, e2: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That
+ def zipAll[B, A1 >: A, That](that: GenIterable[B], e1: A1, e2: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That
def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That
- def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That
+ def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That
override def view: IterableView[A, This]
override def view(from: Int, until: Int): IterableView[A, This]
diff --git a/src/library/scala/collection/interfaces/MapMethods.scala b/src/library/scala/collection/interfaces/MapMethods.scala
index a36a1c08f4..bc38ccdd2e 100644
--- a/src/library/scala/collection/interfaces/MapMethods.scala
+++ b/src/library/scala/collection/interfaces/MapMethods.scala
@@ -41,5 +41,5 @@ trait MapMethods[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
def mapValues[C](f: B => C): Map[A, C]
def updated [B1 >: B](key: A, value: B1): Map[A, B1]
def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1]
- def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1]
+ def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1]
}
diff --git a/src/library/scala/collection/interfaces/SeqMethods.scala b/src/library/scala/collection/interfaces/SeqMethods.scala
index 9d8bf13a20..1f5b08d036 100644
--- a/src/library/scala/collection/interfaces/SeqMethods.scala
+++ b/src/library/scala/collection/interfaces/SeqMethods.scala
@@ -50,7 +50,7 @@ trait SeqMethods[+A, +This <: SeqLike[A, This] with Seq[A]] extends IterableMeth
def lastIndexWhere(p: A => Boolean, end: Int): Int
def lengthCompare(len: Int): Int
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
- def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That
+ def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That
def permutations: Iterator[This]
def prefixLength(p: A => Boolean): Int
def reverse: This
diff --git a/src/library/scala/collection/interfaces/TraversableMethods.scala b/src/library/scala/collection/interfaces/TraversableMethods.scala
index 4a34b0b1ed..8aba39093d 100644
--- a/src/library/scala/collection/interfaces/TraversableMethods.scala
+++ b/src/library/scala/collection/interfaces/TraversableMethods.scala
@@ -18,7 +18,7 @@ trait TraversableMethods[+A, +This <: TraversableLike[A, This]] extends Traversa
self: Traversable[A] =>
// maps/iteration
- def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That
def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That
@@ -26,7 +26,7 @@ trait TraversableMethods[+A, +This <: TraversableLike[A, This]] extends Traversa
// new collections
def ++:[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
+ def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
// element retrieval
def head: A
diff --git a/src/library/scala/collection/mutable/AddingBuilder.scala b/src/library/scala/collection/mutable/AddingBuilder.scala
index 4a06d33ca3..6f198b81ad 100644
--- a/src/library/scala/collection/mutable/AddingBuilder.scala
+++ b/src/library/scala/collection/mutable/AddingBuilder.scala
@@ -27,7 +27,7 @@ import generic._
* @version 2.8
* @since 2.8
*/
-@deprecated("Will be removed after scala 2.9")
+@deprecated("Will be removed after scala 2.9", "2.8.0")
class AddingBuilder[Elem, To <: Addable[Elem, To] with collection.Iterable[Elem] with collection.IterableLike[Elem, To]](empty: To)
extends Builder[Elem, To] {
protected var elems: To = empty
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index 8d28d2bc28..bae6bd1164 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -47,19 +47,19 @@ trait ArrayLike[A, +Repr] extends IndexedSeqOptimized[A, Repr] { self =>
override def stringPrefix = "Array"
}
- @deprecated("use deep.toString instead")
+ @deprecated("use deep.toString instead", "2.8.0")
final def deepToString() =
deep.toString
- @deprecated("use deep.mkString instead")
+ @deprecated("use deep.mkString instead", "2.8.0")
final def deepMkString(start: String, sep: String, end: String): String =
deep.mkString(start, sep, end)
- @deprecated("use deep.mkString instead")
+ @deprecated("use deep.mkString instead", "2.8.0")
final def deepMkString(sep: String): String =
deepMkString("", sep, "")
- @deprecated("use array1.deep.equals(array2.deep) instead")
+ @deprecated("use array1.deep.equals(array2.deep) instead", "2.8.0")
final def deepEquals(that: Any): Boolean = that match {
case x: AnyRef if x.getClass.isArray => deep.equals(WrappedArray.make(x).deep)
case _ => false
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index b72206e6f3..dcba9a52a5 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -93,6 +93,9 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
for (b <- bs) bb += b.result
bb.result
}
+
+ def seq = this.iterator
+
}
/**
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index b6cafbb677..4b0b4779e9 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -52,7 +52,7 @@ extends IndexedSeq[A]
val array: Array[AnyRef] = new Array[AnyRef](length)
- override def par = ParArray.handoff(array.asInstanceOf[Array[A]])
+ override def par = ParArray.handoff(array.asInstanceOf[Array[A]], length)
def apply(idx: Int): A = {
if (idx >= length) throw new IndexOutOfBoundsException(idx.toString)
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 13f709ba39..2323830b7b 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -122,7 +122,7 @@ extends Seq[T]
}
/** View the top element of the stack. */
- @deprecated("use top instead")
+ @deprecated("use top instead", "2.8.0")
def peek = top
/** View the top element of the stack.
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index d29ab9cd48..3c9e336e6f 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -220,7 +220,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @param start the first element to append
* @param len the number of elements to append
*/
- @deprecated("replace by: `buf ++= src.view(start, end)`")
+ @deprecated("replace by: `buf ++= src.view(start, end)`", "2.8.0")
def ++=(src: Array[A], start: Int, len: Int) {
var i = start
val end = i + len
@@ -239,7 +239,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @param elem the element to add.
*/
@deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection.")
+ "Use `clone() +=' if you intend to create a new collection.", "2.8.0")
def + (elem: A): This = { +=(elem); repr }
/** Adds two or more elements to this collection and returns
@@ -253,7 +253,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @param elems the remaining elements to add.
*/
@deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.")
+ "Use `clone() ++=' if you intend to create a new collection.", "2.8.0")
def + (elem1: A, elem2: A, elems: A*): This = {
this += elem1 += elem2 ++= elems
repr
@@ -269,7 +269,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
"As of 2.8, ++ always creates a new collection, even on Buffers.\n"+
"Use ++= instead if you intend to add by side effect to an existing collection.\n"
)
- def ++(xs: TraversableOnce[A]): This = clone() ++= xs
+ def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
/** Creates a new collection with all the elements of this collection except `elem`.
*
@@ -308,5 +308,5 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
"As of 2.8, -- always creates a new collection, even on Buffers.\n"+
"Use --= instead if you intend to remove by side effect from an existing collection.\n"
)
- override def --(xs: TraversableOnce[A]): This = clone() --= xs
+ override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
}
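Aside (not part of the commit): the BufferLike deprecation messages above describe the 2.8 split between in-place ++= and copy-producing ++ (now implemented as clone() ++= xs.seq). A hedged sketch of the recommended idioms:

    import scala.collection.mutable.ArrayBuffer

    object BufferConcatSketch {
      def main(args: Array[String]): Unit = {
        val buf = ArrayBuffer(1, 2)

        buf ++= List(3)                    // add by side effect to the existing buffer
        println(buf)                       // expected: ArrayBuffer(1, 2, 3)

        val copy = buf.clone() ++= List(4) // create a new collection; receiver untouched
        println(buf)                       // expected: ArrayBuffer(1, 2, 3)
        println(copy)                      // expected: ArrayBuffer(1, 2, 3, 4)
      }
    }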
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 3222ad1e53..03102f73d2 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -45,7 +45,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
* @return the updated buffer.
*/
@deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.")
+ "Use `clone() ++=' if you intend to create a new collection.", "2.8.0")
override def +(elem: A): Buffer[A] = self.+(elem)
/** Append a single element to this buffer.
@@ -64,8 +64,8 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
* @return the updated buffer.
*/
@deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=` if you intend to create a new collection.")
- override def ++(xs: TraversableOnce[A]): Buffer[A] = self.++(xs)
+ "Use `clone() ++=` if you intend to create a new collection.", "2.8.0")
+ override def ++(xs: GenTraversableOnce[A]): Buffer[A] = self.++(xs)
/** Appends a number of elements provided by a traversable object.
*
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index 28bf2ceb8e..e6fbce415a 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -20,5 +20,6 @@ package mutable
@cloneable
trait Cloneable[+A <: AnyRef] {
// !!! why doesn't this extend java.lang.Cloneable?
+ // because neither did @serializable, then we changed it to Serializable
override def clone: A = super.clone().asInstanceOf[A]
}
diff --git a/src/library/scala/collection/mutable/GenIterable.scala.disabled b/src/library/scala/collection/mutable/GenIterable.scala.disabled
new file mode 100644
index 0000000000..7fd6a8ca2c
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenIterable.scala.disabled
@@ -0,0 +1,37 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+
+import generic._
+
+
+/** A base trait for iterable collections that can be mutated.
+ *
+ * $possiblyparinfo
+ *
+ * $iterableInfo
+ */
+trait GenIterable[A] extends GenTraversable[A]
+ with scala.collection.GenIterable[A]
+ with scala.collection.GenIterableLike[A, GenIterable[A]]
+// with GenericTraversableTemplate[A, GenIterable]
+{
+ def seq: Iterable[A]
+ //override def companion: GenericCompanion[GenIterable] = GenIterable
+}
+
+
+// object GenIterable extends TraversableFactory[GenIterable] {
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = new GenericCanBuildFrom[A]
+// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
+// }
+
+
diff --git a/src/library/scala/collection/mutable/GenMap.scala.disabled b/src/library/scala/collection/mutable/GenMap.scala.disabled
new file mode 100644
index 0000000000..eca63b43ce
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenMap.scala.disabled
@@ -0,0 +1,40 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package mutable
+
+
+import generic._
+
+
+/** A base trait for maps that can be mutated.
+ * $possiblyparinfo
+ * $mapNote
+ * $mapTags
+ * @since 1.0
+ * @author Matthias Zenger
+ */
+trait GenMap[A, B]
+extends GenIterable[(A, B)]
+ with scala.collection.GenMap[A, B]
+ with scala.collection.GenMapLike[A, B, GenMap[A, B]]
+{
+ def seq: Map[A, B]
+}
+
+
+// object GenMap extends MapFactory[GenMap] {
+// def empty[A, B]: Map[A, B] = Map.empty
+
+// /** $mapCanBuildFromInfo */
+// implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
+// }
+
diff --git a/src/library/scala/collection/mutable/GenSeq.scala.disabled b/src/library/scala/collection/mutable/GenSeq.scala.disabled
new file mode 100644
index 0000000000..376a2ceb8b
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenSeq.scala.disabled
@@ -0,0 +1,44 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package mutable
+
+
+import generic._
+
+
+/** A subtrait of `collection.GenSeq` which represents sequences
+ * that can be mutated.
+ *
+ * $possiblyparinfo
+ *
+ * $seqInfo
+ *
+ * The class adds an `update` method to `collection.Seq`.
+ *
+ * @define Coll mutable.Seq
+ * @define coll mutable sequence
+ */
+trait GenSeq[A] extends GenIterable[A]
+ with scala.collection.GenSeq[A]
+ with scala.collection.GenSeqLike[A, GenSeq[A]]
+// with GenericTraversableTemplate[A, GenSeq]
+{
+ //override def companion: GenericCompanion[GenSeq] = GenSeq
+ def seq: Seq[A]
+}
+
+
+// object GenSeq extends SeqFactory[GenSeq] {
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = new GenericCanBuildFrom[A]
+// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
+// }
+
diff --git a/src/library/scala/collection/mutable/GenSet.scala.disabled b/src/library/scala/collection/mutable/GenSet.scala.disabled
new file mode 100644
index 0000000000..7416577a6a
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenSet.scala.disabled
@@ -0,0 +1,46 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package mutable
+
+
+
+import generic._
+
+
+/** A generic trait for mutable sets.
+ *
+ * $possiblyparinfo
+ * $setNote
+ * $setTags
+ *
+ * @since 1.0
+ * @author Matthias Zenger
+ * @define Coll mutable.Set
+ * @define coll mutable set
+ */
+trait GenSet[A] extends GenIterable[A]
+ with Growable[A]
+ with scala.collection.GenSet[A]
+ with scala.collection.GenSetLike[A, GenSet[A]]
+// with GenericSetTemplate[A, GenSet]
+{
+ //override def companion: GenericCompanion[GenSet] = GenSet
+ def seq: Set[A]
+}
+
+
+// object GenSet extends TraversableFactory[GenSet] {
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSet[A]] = new GenericCanBuildFrom[A]
+// def newBuilder[A]: Builder[A, GenSet[A]] = Set.newBuilder
+// }
+
+
diff --git a/src/library/scala/collection/mutable/GenTraversable.scala.disabled b/src/library/scala/collection/mutable/GenTraversable.scala.disabled
new file mode 100644
index 0000000000..1ad9bfa9ca
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenTraversable.scala.disabled
@@ -0,0 +1,38 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package mutable
+
+
+import generic._
+
+
+/** A trait for traversable collections that can be mutated.
+ *
+ * $possiblyparinfo
+ *
+ * $traversableInfo
+ * @define mutability mutable
+ */
+trait GenTraversable[A] extends scala.collection.GenTraversable[A]
+ with scala.collection.GenTraversableLike[A, GenTraversable[A]]
+// with GenericTraversableTemplate[A, GenTraversable]
+ with Mutable
+{
+ def seq: Traversable[A]
+ //override def companion: GenericCompanion[GenTraversable] = GenTraversable
+}
+
+// object GenTraversable extends TraversableFactory[GenTraversable] {
+// implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+// def newBuilder[A] = Traversable.newBuilder
+// }
+
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index b8409ab3bd..dea1ec7d44 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -190,7 +190,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
protected final def foreachEntry[C](f: Entry => C) { entriesIterator.foreach(f) }
/** An iterator returning all entries */
- @deprecated("use entriesIterator instead")
+ @deprecated("use entriesIterator instead", "2.8.0")
protected def entries: Iterator[Entry] = entriesIterator
/** Remove all entries from table
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index b5e003dcfb..d541ad8f52 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -54,7 +54,7 @@ extends Map[A, B] with Serializable
def iterator: Iterator[(A, B)] = imap.iterator
- @deprecated("use `iterator' instead")
+ @deprecated("use `iterator' instead", "2.8.0")
override def elements = iterator
override def toList: List[(A, B)] = imap.toList
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index 393a622ced..0975b33dd0 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -40,7 +40,8 @@ class ImmutableSetAdaptor[A](protected var set: immutable.Set[A]) extends Set[A]
def iterator: Iterator[A] = set.iterator
- @deprecated("use `iterator' instead") override def elements: Iterator[A] = iterator
+ @deprecated("use `iterator' instead", "2.8.0")
+ override def elements: Iterator[A] = iterator
def +=(elem: A): this.type = { set = set + elem; this }
diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala
index 8e244d980d..1a5e58b504 100644
--- a/src/library/scala/collection/mutable/Iterable.scala
+++ b/src/library/scala/collection/mutable/Iterable.scala
@@ -15,12 +15,15 @@ import parallel.mutable.ParIterable
* $iterableInfo
*/
trait Iterable[A] extends Traversable[A]
+// with GenIterable[A]
with scala.collection.Iterable[A]
with GenericTraversableTemplate[A, Iterable]
with IterableLike[A, Iterable[A]]
- with Parallelizable[A, ParIterable[A]] {
+ with Parallelizable[A, ParIterable[A]]
+{
override def companion: GenericCompanion[Iterable] = Iterable
protected[this] override def parCombiner = ParIterable.newCombiner[A] // if `mutable.IterableLike` gets introduced, please move this there!
+ override def seq: Iterable[A] = this
}
/** $factoryInfo
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index 28e0ca2bcf..561dcc9399 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -21,6 +21,7 @@ import generic._
*/
trait Map[A, B]
extends Iterable[(A, B)]
+// with GenMap[A, B]
with scala.collection.Map[A, B]
with MapLike[A, B, Map[A, B]] {
diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala
index a759933926..174c3c6528 100644
--- a/src/library/scala/collection/mutable/MapBuilder.scala
+++ b/src/library/scala/collection/mutable/MapBuilder.scala
@@ -21,7 +21,7 @@ package mutable
*
* @since 2.8
*/
-class MapBuilder[A, B, Coll <: scala.collection.Map[A, B] with scala.collection.MapLike[A, B, Coll]](empty: Coll)
+class MapBuilder[A, B, Coll <: scala.collection.GenMap[A, B] with scala.collection.GenMapLike[A, B, Coll]](empty: Coll)
extends Builder[(A, B), Coll] {
protected var elems: Coll = empty
def +=(x: (A, B)): this.type = {
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index d0df44b69a..e737bf5509 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -7,7 +7,6 @@
\* */
-
package scala.collection
package mutable
@@ -126,8 +125,8 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
"As of 2.8, this operation creates a new map. To add the elements as a\n"+
"side effect to an existing map and return that map itself, use ++=."
)
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] =
- clone().asInstanceOf[Map[A, B1]] ++= xs
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
+ clone().asInstanceOf[Map[A, B1]] ++= xs.seq
/** Removes a key from this map, returning the value associated previously
* with that key as an option.
@@ -163,7 +162,8 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* If key is not present return None.
* @param key the key to be removed
*/
- @deprecated("Use `remove' instead") def removeKey(key: A): Option[B] = remove(key)
+ @deprecated("Use `remove' instead", "2.8.0")
+ def removeKey(key: A): Option[B] = remove(key)
/** Removes all bindings from the map. After this operation has completed,
* the map will be empty.
@@ -246,5 +246,5 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
"As of 2.8, this operation creates a new map. To remove the elements as a\n"+
"side effect to an existing map and return that map itself, use --=."
)
- override def --(xs: TraversableOnce[A]): This = clone() --= xs
+ override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
}
diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala
index 3e8f405476..d32f1559c0 100644
--- a/src/library/scala/collection/mutable/MapProxy.scala
+++ b/src/library/scala/collection/mutable/MapProxy.scala
@@ -32,7 +32,7 @@ trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
override def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = newProxy(self + kv)
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*))
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]) = newProxy(self ++ xs)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]) = newProxy(self ++ xs.seq)
override def -(key: A) = newProxy(self - key)
override def += (kv: (A, B)) = { self += kv ; this }
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index fc75ae91d4..128ec0ff10 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -36,7 +36,7 @@ trait MultiMap[A, B] extends Map[A, Set[B]] {
*/
protected def makeSet: Set[B] = new HashSet[B]
- @deprecated("use addBinding instead")
+ @deprecated("use addBinding instead", "2.8.0")
def add(key: A, value: B): this.type = addBinding(key, value)
/** Assigns the specified `value` to a specified `key`, replacing
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index 86b0da39f1..fb724339e3 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -104,7 +104,7 @@ extends LinearSeq[A]
len = len + 1
}
- @deprecated("use clear instead")
+ @deprecated("use clear() instead", "2.8.0")
def reset() { clear() }
/** Returns an iterator over all elements of this list.
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 0513023bfa..59648f160c 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -93,7 +93,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
@deprecated(
"Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection."
+ "Use `clone() +=' if you intend to create a new collection.", "2.8.0"
)
def +(elem: A): PriorityQueue[A] = { this.clone() += elem }
@@ -104,7 +104,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*/
@deprecated(
"Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection."
+ "Use `clone() ++=' if you intend to create a new collection.", "2.8.0"
)
def +(elem1: A, elem2: A, elems: A*) = { this.clone().+=(elem1, elem2, elems : _*) }
@@ -127,7 +127,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @param xs a traversable object.
* @return a new priority queue containing elements of both `xs` and `this`.
*/
- def ++(xs: TraversableOnce[A]) = { this.clone() ++= xs }
+ def ++(xs: GenTraversableOnce[A]) = { this.clone() ++= xs.seq }
/** Adds all elements to the queue.
*
@@ -163,8 +163,16 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*
* @return the element with the highest priority.
*/
+ @deprecated("Use `head` instead.", "2.9.0")
def max: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
+ /** Returns the element with the highest priority in the queue,
+ * or throws an error if there is no element contained in the queue.
+ *
+ * @return the element with the highest priority.
+ */
+ override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
+
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
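Aside (not part of the commit): the PriorityQueue hunk deprecates max in favour of head; both return, without removing, the element with the highest priority. A hedged sketch:

    import scala.collection.mutable.PriorityQueue

    object PriorityQueueHeadSketch {
      def main(args: Array[String]): Unit = {
        val pq = new PriorityQueue[Int]()
        pq += 1
        pq += 5
        pq += 3
        println(pq.head)      // expected: 5 (largest Int = highest priority); not removed
        println(pq.dequeue()) // expected: 5
        println(pq.head)      // expected: 3
      }
    }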
diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
index f8330c3aac..70b9e82387 100644
--- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
@@ -73,6 +73,14 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority
*
* @return the element with the highest priority.
*/
+ override def head: A = self.head
+
+ /** Returns the element with the highest priority in the queue,
+ * or throws an error if there is no element contained in the queue.
+ *
+ * @return the element with the highest priority.
+ */
+ @deprecated("Use `head` instead.", "2.9.0")
override def max: A = self.max
/** Removes all elements from the queue. After this operation is completed,
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index e401e2d94a..d5398f0e0e 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -16,7 +16,7 @@ import generic._
/** This class is used internally to implement data structures that
* are based on resizable arrays.
*
- * @tparam A type of the elements contained in this resizeable array.
+ * @tparam A type of the elements contained in this resizable array.
*
* @author Matthias Zenger, Burak Emir
* @author Martin Odersky
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index 8e7019c755..42b4ac5c15 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -25,9 +25,10 @@ import generic._
* @define coll mutable sequence
*/
trait Seq[A] extends Iterable[A]
- with scala.collection.Seq[A]
- with GenericTraversableTemplate[A, Seq]
- with SeqLike[A, Seq[A]] {
+// with GenSeq[A]
+ with scala.collection.Seq[A]
+ with GenericTraversableTemplate[A, Seq]
+ with SeqLike[A, Seq[A]] {
override def companion: GenericCompanion[Seq] = Seq
override def seq: Seq[A] = this
}
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
index 4d315b8256..ae70378427 100644
--- a/src/library/scala/collection/mutable/SeqLike.scala
+++ b/src/library/scala/collection/mutable/SeqLike.scala
@@ -20,7 +20,8 @@ import parallel.mutable.ParSeq
trait SeqLike[A, +This <: SeqLike[A, This] with Seq[A]]
extends scala.collection.SeqLike[A, This]
with Cloneable[This]
- with Parallelizable[A, ParSeq[A]] {
+ with Parallelizable[A, ParSeq[A]]
+{
self =>
protected[this] override def parCombiner = ParSeq.newCombiner[A]
diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala
index dba629ae67..30fc3682fd 100644
--- a/src/library/scala/collection/mutable/Set.scala
+++ b/src/library/scala/collection/mutable/Set.scala
@@ -23,6 +23,7 @@ import generic._
* @define coll mutable set
*/
trait Set[A] extends Iterable[A]
+// with GenSet[A]
with scala.collection.Set[A]
with GenericSetTemplate[A, Set]
with SetLike[A, Set[A]] {
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 985e7a3b47..855ba74f8c 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -129,7 +129,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
*/
def clear() { foreach(-=) }
- override def clone(): This = empty ++= repr
+ override def clone(): This = empty ++= repr.seq
/** The result when this set is used as a builder
* @return the set representation itself.
@@ -179,7 +179,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
"As of 2.8, this operation creates a new set. To add the elements as a\n"+
"side effect to an existing set and return that set itself, use ++=."
)
- override def ++(xs: TraversableOnce[A]): This = clone() ++= xs
+ override def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
/** Creates a new set consisting of all the elements of this set except `elem`.
*
@@ -219,7 +219,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
"As of 2.8, this operation creates a new set. To remove the elements as a\n"+
"side effect to an existing set and return that set itself, use --=."
)
- override def --(xs: TraversableOnce[A]): This = clone() --= xs
+ override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
/** Send a message to this scriptable object.
*
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 0d1746b33b..89a02ad2ab 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -123,7 +123,7 @@ extends Seq[A]
*/
def pushAll(xs: TraversableOnce[A]): this.type = { xs.seq foreach push ; this }
- @deprecated("use pushAll")
+ @deprecated("use pushAll", "2.8.0")
@migration(2, 8, "Stack ++= now pushes arguments on the stack from left to right.")
def ++=(xs: TraversableOnce[A]): this.type = pushAll(xs)
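Aside (not part of the commit): the Stack hunk keeps pushAll as the non-deprecated spelling of ++=; per the migration note, arguments are pushed from left to right, so the last element ends up on top. A hedged sketch:

    import scala.collection.mutable.Stack

    object StackPushAllSketch {
      def main(args: Array[String]): Unit = {
        val s = new Stack[Int]
        s.pushAll(List(1, 2, 3)) // pushed left to right, so 3 is on top
        println(s.top)           // expected: 3
        println(s.pop())         // expected: 3
        println(s.pop())         // expected: 2
      }
    }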
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 245a9dd6f0..5dd5592096 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -58,7 +58,8 @@ trait StackProxy[A] extends Stack[A] with Proxy {
*
* @param iter an iterable object
*/
- @deprecated("use pushAll") override def ++=(xs: TraversableOnce[A]): this.type = { self ++= xs ; this }
+ @deprecated("use pushAll", "2.8.0")
+ override def ++=(xs: TraversableOnce[A]): this.type = { self ++= xs ; this }
override def push(elem1: A, elem2: A, elems: A*): this.type = {
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 925a9fd9da..edd59a8221 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -90,7 +90,7 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
def capacity: Int = underlying.capacity()
@deprecated("Use `ensureCapacity' instead. An assignment is misleading because\n"+
- "it can never decrease the capacity.")
+ "it can never decrease the capacity.", "2.8.0")
def capacity_=(n: Int) { ensureCapacity(n) }
/** Ensure that the capacity is at least the given argument.
@@ -356,28 +356,28 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
def insert(index: Int, x: Char): StringBuilder = insert(index, String.valueOf(x))
@deprecated("Use appendAll instead. This method is deprecated because of the\n"+
- "possible confusion with `append(Any)'.")
+ "possible confusion with `append(Any)'.", "2.8.0")
def append(x: Seq[Char]): StringBuilder = appendAll(x)
@deprecated("use appendAll instead. This method is deprecated because\n"+
- "of the possible confusion with `append(Any)'.")
+ "of the possible confusion with `append(Any)'.", "2.8.0")
def append(x: Array[Char]): StringBuilder = appendAll(x)
@deprecated("use appendAll instead. This method is deprecated because\n"+
- "of the possible confusion with `append(Any, Int, Int)'.")
+ "of the possible confusion with `append(Any, Int, Int)'.", "2.8.0")
def append(x: Array[Char], offset: Int, len: Int): StringBuilder = appendAll(x, offset, len)
@deprecated("use insertAll instead. This method is deprecated because of the\n"+
- "possible confusion with `insert(Int, Any, Int, Int)'.")
+ "possible confusion with `insert(Int, Any, Int, Int)'.", "2.8.0")
def insert(index: Int, str: Array[Char], offset: Int, len: Int): StringBuilder =
insertAll(index, str, offset, len)
@deprecated("use insertAll instead. This method is deprecated because of\n"+
- "the possible confusion with `insert(Int, Any)'.")
+ "the possible confusion with `insert(Int, Any)'.", "2.8.0")
def insert(at: Int, x: Seq[Char]): StringBuilder = insertAll(at, x)
@deprecated("use insertAll instead. This method is deprecated because of\n"+
- "the possible confusion with `insert(Int, Any)'.")
+ "the possible confusion with `insert(Int, Any)'.", "2.8.0")
def insert(at: Int, x: Array[Char]): StringBuilder = insertAll(at, x)
/** Finds the index of the first occurrence of the specified substring.
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index f336cc6b75..5b76d94517 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -63,7 +63,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param xs the traversable object.
*/
- override def ++(xs: TraversableOnce[A]): Self = synchronized {
+ override def ++(xs: GenTraversableOnce[A]): Self = synchronized {
super.++(xs)
}
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
index 3295b43d7f..159b8312b2 100644
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
@@ -71,6 +71,14 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
*
* @return the element with the highest priority.
*/
+ override def head: A = synchronized { super.head }
+
+ /** Returns the element with the highest priority in the queue,
+ * or throws an error if there is no element contained in the queue.
+ *
+ * @return the element with the highest priority.
+ */
+ @deprecated("Use `head` instead.", "2.9.0")
override def max: A = synchronized { super.max }
/** Removes all elements from the queue. After this operation is completed,
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index e1ce39a272..c945a859f3 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -66,7 +66,7 @@ trait SynchronizedSet[A] extends Set[A] {
super.remove(elem)
}
- override def intersect(that: scala.collection.Set[A]) = synchronized {
+ override def intersect(that: scala.collection.GenSet[A]) = synchronized {
super.intersect(that)
}
@@ -74,7 +74,7 @@ trait SynchronizedSet[A] extends Set[A] {
super.clear
}
- override def subsetOf(that: scala.collection.Set[A]) = synchronized {
+ override def subsetOf(that: scala.collection.GenSet[A]) = synchronized {
super.subsetOf(that)
}
diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala
index d41db458d9..b711e0b4db 100644
--- a/src/library/scala/collection/mutable/Traversable.scala
+++ b/src/library/scala/collection/mutable/Traversable.scala
@@ -18,10 +18,12 @@ import generic._
* @define mutability mutable
*/
trait Traversable[A] extends scala.collection.Traversable[A]
+// with GenTraversable[A]
with GenericTraversableTemplate[A, Traversable]
with TraversableLike[A, Traversable[A]]
with Mutable {
override def companion: GenericCompanion[Traversable] = Traversable
+ override def seq: Traversable[A] = this
}
/** $factoryInfo
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index ec45a5eb90..d1453c9ce9 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -33,7 +33,7 @@ import scala.collection.generic.Sizing
* @since 2.9
*/
trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
-self: EnvironmentPassingCombiner[Elem, To] =>
+//self: EnvironmentPassingCombiner[Elem, To] =>
private[collection] final val tasksupport = getTaskSupport
/** Combines the contents of the receiver builder and the `other` builder,
@@ -66,13 +66,14 @@ self: EnvironmentPassingCombiner[Elem, To] =>
}
+/*
private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] {
abstract override def result = {
val res = super.result
res
}
}
-
+*/
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index aeed6949c7..0b5faf15ee 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -8,6 +8,7 @@
package scala.collection.parallel
+import scala.collection.GenIterable
import scala.collection.generic._
import scala.collection.parallel.mutable.ParArrayCombiner
import scala.collection.parallel.mutable.ParArray
@@ -26,10 +27,14 @@ import scala.collection.parallel.mutable.ParArray
* @define Coll ParIterable
* @define coll parallel iterable
*/
-trait ParIterable[+T] extends Iterable[T]
- with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], Iterable[T]] {
+trait ParIterable[+T]
+extends GenIterable[T]
+ with GenericParTemplate[T, ParIterable]
+ with ParIterableLike[T, ParIterable[T], Iterable[T]] {
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
+ //protected[this] override def newBuilder = ParIterable.newBuilder[T]
+
+ def stringPrefix = "ParIterable"
}
/** $factoryInfo
@@ -41,3 +46,4 @@ object ParIterable extends ParFactory[ParIterable] {
def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]
}
+
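Aside (not part of the commit): the ParIterable hunk reparents the trait under scala.collection.GenIterable, the common supertype of sequential and parallel iterables that these hunks migrate the library to. A hedged sketch of code written against GenIterable, assuming the final 2.9 Gen* hierarchy:

    import scala.collection.GenIterable

    object GenIterableSketch {
      // Works for List, Vector, their .par counterparts, etc.
      def describe(xs: GenIterable[Int]): String =
        (if (xs.isInstanceOf[scala.collection.parallel.ParIterable[_]]) "parallel" else "sequential") +
          " sum = " + xs.seq.sum

      def main(args: Array[String]): Unit = {
        println(describe(List(1, 2, 3)))     // expected: sequential sum = 6
        println(describe(List(1, 2, 3).par)) // expected: parallel sum = 6
      }
    }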
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 50a36fa8ed..b32ea108f4 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -16,8 +16,13 @@ import scala.collection.mutable.Builder
import scala.collection.mutable.ArrayBuffer
import scala.collection.IterableLike
import scala.collection.Parallel
+import scala.collection.Parallelizable
import scala.collection.CustomParallelizable
import scala.collection.generic._
+import scala.collection.GenIterableLike
+import scala.collection.GenIterable
+import scala.collection.GenTraversableOnce
+import scala.collection.GenTraversable
import immutable.HashMapCombiner
import java.util.concurrent.atomic.AtomicBoolean
@@ -49,10 +54,10 @@ import annotation.unchecked.uncheckedVariance
* on the concept of splitters, which extend iterators. Every parallel collection defines:
*
* {{{
- * def parallelIterator: ParIterableIterator[T]
+ * def splitter: IterableSplitter[T]
* }}}
*
- * which returns an instance of `ParIterableIterator[T]`, which is a subtype of `Splitter[T]`.
+ * which returns an instance of `IterableSplitter[T]`, which is a subtype of `Splitter[T]`.
* Parallel iterators have a method `remaining` to check the remaining number of elements,
* and method `split` which is defined by splitters. Method `split` divides the splitters
* iterate over into disjunct subsets:
@@ -103,7 +108,7 @@ import annotation.unchecked.uncheckedVariance
* be overridden in concrete implementations if necessary.
*
* Since this trait extends the `Iterable` trait, methods like `size` must also
- * be implemented in concrete collections, while `iterator` forwards to `parallelIterator` by
+ * be implemented in concrete collections, while `iterator` forwards to `splitter` by
* default.
*
* Each parallel collection is bound to a specific fork/join pool, on which dormant worker
@@ -138,9 +143,6 @@ import annotation.unchecked.uncheckedVariance
* produce data-races, deadlocks or invalidation of state if care is not taken. It is up to the programmer
* to either avoid using side-effects or to use some form of synchronization when accessing mutable data.
*
- * @define undefinedorder
- * The order in which the operations on elements are performed is unspecified and may be nondeterministic.
- *
* @define pbfinfo
* An implicit value of class `CanCombineFrom` which determines the
* result class `That` from the current representation type `Repr` and
@@ -157,7 +159,7 @@ import annotation.unchecked.uncheckedVariance
*
*/
trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]]
-extends IterableLike[T, Repr]
+extends GenIterableLike[T, Repr]
with CustomParallelizable[T, Repr]
with Parallel
with HasNewCombiner[T, Repr]
@@ -166,7 +168,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
import tasksupport._
- override def seq: Sequential = throw new UnsupportedOperationException("not implemented.")
+ def seq: Sequential
+
+ def repr: Repr = this.asInstanceOf[Repr]
/** Parallel iterators are split iterators that have additional accessor and
* transformer methods defined in terms of methods `next` and `hasNext`.
@@ -179,11 +183,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
 * The self-type ensures that signal context passing behaviour gets mixed into
* a concrete object instance.
*/
- trait ParIterator extends ParIterableIterator[T] {
+ trait ParIterator extends IterableSplitter[T] {
me: SignalContextPassingIterator[ParIterator] =>
var signalDelegate: Signalling = IdleSignalling
def repr = self.repr
- def split: Seq[ParIterableIterator[T]]
+ def split: Seq[IterableSplitter[T]]
}
/** A stackable modification that ensures signal contexts get passed along the iterators.
@@ -204,23 +208,27 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
+ def hasDefiniteSize = true
+
+ def nonEmpty = size != 0
+
/** Creates a new parallel iterator used to traverse the elements of this parallel collection.
 * This iterator is more specific than the one returned by `iterator`, and augmented
* with additional accessor and transformer methods.
*
* @return a parallel iterator
*/
- def parallelIterator: ParIterableIterator[T]
+ protected[parallel] def splitter: IterableSplitter[T]
/** Creates a new split iterator used to traverse the elements of this collection.
*
- * By default, this method is implemented in terms of the protected `parallelIterator` method.
+ * By default, this method is implemented in terms of the protected `splitter` method.
*
* @return a split iterator
*/
- def iterator: Splitter[T] = parallelIterator
+ def iterator: Splitter[T] = splitter
- override def par = repr
+ override def par: Repr = repr
/** Denotes whether this parallel collection has strict splitters.
*
@@ -249,7 +257,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool.
* This method forwards the call to `newCombiner`.
*/
- protected[this] override def newBuilder: collection.mutable.Builder[T, Repr] = newCombiner
+ //protected[this] def newBuilder: collection.mutable.Builder[T, Repr] = newCombiner
/** Optionally reuses an existing combiner for better performance. By default it doesn't - subclasses may override this behaviour.
* The provided combiner `oldc` that can potentially be reused will be either some combiner from the previous computational task, or `None` if there
@@ -319,14 +327,23 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- override def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end)
+ protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] {
+ def apply(from: Sequential) = bf.apply(from.par.asInstanceOf[Repr]) // !!! we only use this on `this.seq`, and know that `this.seq.par.getClass == this.getClass`
+ def apply() = bf.apply()
+ }
+
+ protected[this] def sequentially[S, That <: Parallel](b: Sequential => Parallelizable[S, That]) = b(seq).par.asInstanceOf[Repr]
+
+ def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end)
- override def mkString(sep: String): String = seq.mkString("", sep, "")
+ def mkString(sep: String): String = seq.mkString("", sep, "")
- override def mkString: String = seq.mkString("")
+ def mkString: String = seq.mkString("")
override def toString = seq.mkString(stringPrefix + "(", ", ", ")")
+ def canEqual(other: Any) = true
+
/** Reduces the elements of this sequence using the specified associative binary operator.
*
* $undefinedorder
@@ -343,7 +360,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* if this $coll is empty.
*/
def reduce[U >: T](op: (U, U) => U): U = {
- executeAndWaitResult(new Reduce(op, parallelIterator) mapResult { _.get })
+ executeAndWaitResult(new Reduce(op, splitter) mapResult { _.get })
}
/** Optionally reduces the elements of this sequence using the specified associative binary operator.
@@ -362,10 +379,6 @@ self: ParIterableLike[T, Repr, Sequential] =>
*/
def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op))
- override def reduceLeft[U >: T](op: (U, T) => U): U = iterator.reduceLeft(op)
-
- override def reduceRight[U >: T](op: (T, U) => U): U = iterator.reduceRight(op)
-
/** Folds the elements of this sequence using the specified associative binary operator.
* The order in which the elements are reduced is unspecified and may be nondeterministic.
*
@@ -382,13 +395,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return the result of applying fold operator `op` between all the elements and `z`
*/
def fold[U >: T](z: U)(op: (U, U) => U): U = {
- executeAndWaitResult(new Fold(z, op, parallelIterator))
+ executeAndWaitResult(new Fold(z, op, splitter))
}
- override def foldLeft[S](z: S)(op: (S, T) => S): S = iterator.foldLeft(z)(op)
-
- override def foldRight[S](z: S)(op: (T, S) => S): S = iterator.foldRight(z)(op)
-
/** Aggregates the results of applying an operator to subsequent elements.
*
* This is a more general form of `fold` and `reduce`. It has similar semantics, but does
@@ -418,9 +427,25 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param combop an associative operator used to combine results from different partitions
*/
def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
- executeAndWaitResult(new Aggregate(z, seqop, combop, parallelIterator))
+ executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
}
+ def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op)
+
+ def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op)
+
+ def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
+
+ def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op)
+
+ def reduceLeft[U >: T](op: (U, T) => U): U = seq.reduceLeft(op)
+
+ def reduceRight[U >: T](op: (T, U) => U): U = seq.reduceRight(op)
+
+ def reduceLeftOption[U >: T](op: (U, T) => U): Option[U] = seq.reduceLeftOption(op)
+
+ def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op)
+
/*
 /** Applies a function `f` to all the elements of $coll. Does so in an unspecified order,
* and in parallel.
@@ -430,8 +455,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
*/
- def pforeach[U](f: T => U): Unit = {
- executeAndWaitResult(new Foreach(f, parallelIterator))
+ def pareach[U](f: T => U): Unit = {
+ executeAndWaitResult(new Foreach(f, splitter))
}
*/
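A usage sketch for the `aggregate` signature shown above (a standard API call, not part of the patch): `seqop` folds elements within a partition, `combop` merges partition results, and the two must agree for the result to be deterministic.

    // Total length of the strings, computed per partition and then combined.
    val totalLength = List("a", "bb", "ccc").par.aggregate(0)(
      (acc, s) => acc + s.length,  // seqop: fold within a partition
      _ + _                        // combop: merge partition results
    )
    // totalLength == 6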
@@ -440,53 +465,53 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
*/
- override def foreach[U](f: T => U) = {
- executeAndWaitResult(new Foreach(f, parallelIterator))
+ def foreach[U](f: T => U) = {
+ executeAndWaitResult(new Foreach(f, splitter))
}
- override def count(p: T => Boolean): Int = {
- executeAndWaitResult(new Count(p, parallelIterator))
+ def count(p: T => Boolean): Int = {
+ executeAndWaitResult(new Count(p, splitter))
}
- override def sum[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Sum[U](num, parallelIterator))
+ def sum[U >: T](implicit num: Numeric[U]): U = {
+ executeAndWaitResult(new Sum[U](num, splitter))
}
- override def product[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Product[U](num, parallelIterator))
+ def product[U >: T](implicit num: Numeric[U]): U = {
+ executeAndWaitResult(new Product[U](num, splitter))
}
- override def min[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Min(ord, parallelIterator) mapResult { _.get }).asInstanceOf[T]
+ def min[U >: T](implicit ord: Ordering[U]): T = {
+ executeAndWaitResult(new Min(ord, splitter) mapResult { _.get }).asInstanceOf[T]
}
- override def max[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Max(ord, parallelIterator) mapResult { _.get }).asInstanceOf[T]
+ def max[U >: T](implicit ord: Ordering[U]): T = {
+ executeAndWaitResult(new Max(ord, splitter) mapResult { _.get }).asInstanceOf[T]
}
- override def maxBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
+ def maxBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
if (isEmpty) throw new UnsupportedOperationException("empty.maxBy")
reduce((x, y) => if (cmp.gteq(f(x), f(y))) x else y)
}
- override def minBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
+ def minBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
if (isEmpty) throw new UnsupportedOperationException("empty.minBy")
reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
}
- override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Map[S, That](f, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.map(f)(bf)
+ def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result })
+ } otherwise seq.map(f)(bf2seq(bf))
- override def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Collect[S, That](pf, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.collect(pf)(bf)
+ def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result })
+ } otherwise seq.collect(pf)(bf2seq(bf))
- override def flatMap[S, That](f: T => TraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new FlatMap[S, That](f, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.flatMap(f)(bf)
+ def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result })
+ } otherwise seq.flatMap(f)(bf2seq(bf))
/** Tests whether a predicate holds for all elements of this $coll.
*
@@ -495,8 +520,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param p a predicate used to test elements
* @return true if `p` holds for all elements, false otherwise
*/
- override def forall(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Forall(pred, parallelIterator assign new DefaultSignalling with VolatileAbort))
+ def forall(pred: T => Boolean): Boolean = {
+ executeAndWaitResult(new Forall(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
/** Tests whether a predicate holds for some element of this $coll.
@@ -506,8 +531,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param p a predicate used to test elements
* @return true if `p` holds for some element, false otherwise
*/
- override def exists(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Exists(pred, parallelIterator assign new DefaultSignalling with VolatileAbort))
+ def exists(pred: T => Boolean): Boolean = {
+ executeAndWaitResult(new Exists(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
/** Finds some element in the collection for which the predicate holds, if such
@@ -521,30 +546,30 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param p predicate used to test the elements
* @return an option value with the element if such an element exists, or `None` otherwise
*/
- override def find(pred: T => Boolean): Option[T] = {
- executeAndWaitResult(new Find(pred, parallelIterator assign new DefaultSignalling with VolatileAbort))
+ def find(pred: T => Boolean): Option[T] = {
+ executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
protected[this] def cbfactory ={
() => newCombiner
}
- override def filter(pred: T => Boolean): Repr = {
- executeAndWaitResult(new Filter(pred, cbfactory, parallelIterator) mapResult { _.result })
+ def filter(pred: T => Boolean): Repr = {
+ executeAndWaitResult(new Filter(pred, cbfactory, splitter) mapResult { _.result })
}
- override def filterNot(pred: T => Boolean): Repr = {
- executeAndWaitResult(new FilterNot(pred, cbfactory, parallelIterator) mapResult { _.result })
+ def filterNot(pred: T => Boolean): Repr = {
+ executeAndWaitResult(new FilterNot(pred, cbfactory, splitter) mapResult { _.result })
}
- override def ++[U >: T, That](that: TraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+ def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = {
if (that.isParallel && bf.isParallel) {
// println("case both are parallel")
val other = that.asParIterable
val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), parallelIterator)
+ val copythis = new Copy(() => pbf(repr), splitter)
val copythat = wrap {
- val othtask = new other.Copy(() => pbf(self.repr), other.parallelIterator)
+ val othtask = new other.Copy(() => pbf(self.repr), other.splitter)
tasksupport.executeAndWaitResult(othtask)
}
val task = (copythis parallel copythat) { _ combine _ } mapResult {
@@ -554,37 +579,36 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else if (bf.isParallel) {
// println("case parallel builder, `that` not parallel")
val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), parallelIterator)
+ val copythis = new Copy(() => pbf(repr), splitter)
val copythat = wrap {
val cb = pbf(repr)
- for (elem <- that) cb += elem
+ for (elem <- that.seq) cb += elem
cb
}
executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.result })
} else {
// println("case not a parallel builder")
val b = bf(repr)
- this.parallelIterator.copy2builder[U, That, Builder[U, That]](b)
- if (that.isInstanceOf[Parallel]) for (elem <- that.asInstanceOf[Iterable[U]].iterator) b += elem
- else for (elem <- that) b += elem
+ this.splitter.copy2builder[U, That, Builder[U, That]](b)
+ for (elem <- that.seq) b += elem
b.result
}
}
- override def partition(pred: T => Boolean): (Repr, Repr) = {
- executeAndWaitResult(new Partition(pred, cbfactory, parallelIterator) mapResult { p => (p._1.result, p._2.result) })
+ def partition(pred: T => Boolean): (Repr, Repr) = {
+ executeAndWaitResult(new Partition(pred, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
}
- override def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = {
- executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], parallelIterator) mapResult {
+ def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = {
+ executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult {
rcb => rcb.groupByKey(cbfactory)
})
}
- override def take(n: Int): Repr = {
+ def take(n: Int): Repr = {
val actualn = if (size > n) n else size
if (actualn < MIN_FOR_COPY) take_sequential(actualn)
- else executeAndWaitResult(new Take(actualn, cbfactory, parallelIterator) mapResult {
+ else executeAndWaitResult(new Take(actualn, cbfactory, splitter) mapResult {
_.result
})
}
@@ -592,7 +616,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
private def take_sequential(n: Int) = {
val cb = newCombiner
cb.sizeHint(n)
- val it = parallelIterator
+ val it = splitter
var left = n
while (left > 0) {
cb += it.next
@@ -601,14 +625,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
cb.result
}
- override def drop(n: Int): Repr = {
+ def drop(n: Int): Repr = {
val actualn = if (size > n) n else size
if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn)
- else executeAndWaitResult(new Drop(actualn, cbfactory, parallelIterator) mapResult { _.result })
+ else executeAndWaitResult(new Drop(actualn, cbfactory, splitter) mapResult { _.result })
}
private def drop_sequential(n: Int) = {
- val it = parallelIterator drop n
+ val it = splitter drop n
val cb = newCombiner
cb.sizeHint(size - n)
while (it.hasNext) cb += it.next
@@ -619,13 +643,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
val from = unc_from min size max 0
val until = unc_until min size max from
if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until)
- else executeAndWaitResult(new Slice(from, until, cbfactory, parallelIterator) mapResult { _.result })
+ else executeAndWaitResult(new Slice(from, until, cbfactory, splitter) mapResult { _.result })
}
private def slice_sequential(from: Int, until: Int): Repr = {
val cb = newCombiner
var left = until - from
- val it = parallelIterator drop from
+ val it = splitter drop from
while (left > 0) {
cb += it.next
left -= 1
@@ -633,8 +657,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
cb.result
}
- override def splitAt(n: Int): (Repr, Repr) = {
- executeAndWaitResult(new SplitAt(n, cbfactory, parallelIterator) mapResult { p => (p._1.result, p._2.result) })
+ def splitAt(n: Int): (Repr, Repr) = {
+ executeAndWaitResult(new SplitAt(n, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
}
/** Computes a prefix scan of the elements of the collection.
@@ -652,13 +676,20 @@ self: ParIterableLike[T, Repr, Sequential] =>
*
* @return a new $coll containing the prefix scan of the elements in this $coll
*/
- def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanCombineFrom[Repr, U, That]): That = if (parallelismLevel > 1) {
- if (size > 0) executeAndWaitResult(new CreateScanTree(0, size, z, op, parallelIterator) mapResult {
- tree => executeAndWaitResult(new FromScanTree(tree, z, op, cbf) mapResult {
- cb => cb.result
- })
- }) else (cbf(self.repr) += z).result
- } else super.scanLeft(z)(op)(cbf)
+ def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf.isParallel) {
+ val cbf = bf.asParallel
+ if (parallelismLevel > 1) {
+ if (size > 0) executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult {
+ tree => executeAndWaitResult(new FromScanTree(tree, z, op, cbf) mapResult {
+ cb => cb.result
+ })
+ }) else (cbf(self.repr) += z).result
+ } else seq.scan(z)(op)(bf2seq(bf))
+ } else seq.scan(z)(op)(bf2seq(bf))
+
+ def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = seq.scanLeft(z)(op)(bf2seq(bf))
+
+ def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = seq.scanRight(z)(op)(bf2seq(bf))
/** Takes the longest prefix of elements that satisfy the predicate.
*
@@ -668,10 +699,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param pred the predicate used to test the elements
 * @return the longest prefix of this $coll of elements that satisfy the predicate `pred`
*/
- override def takeWhile(pred: T => Boolean): Repr = {
+ def takeWhile(pred: T => Boolean): Repr = {
val cntx = new DefaultSignalling with AtomicIndexFlag
cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new TakeWhile(0, pred, cbfactory, parallelIterator assign cntx) mapResult { _._1.result })
+ executeAndWaitResult(new TakeWhile(0, pred, cbfactory, splitter assign cntx) mapResult { _._1.result })
}
/** Splits this $coll into a prefix/suffix pair according to a predicate.
@@ -683,10 +714,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return a pair consisting of the longest prefix of the collection for which all
* the elements satisfy `pred`, and the rest of the collection
*/
- override def span(pred: T => Boolean): (Repr, Repr) = {
+ def span(pred: T => Boolean): (Repr, Repr) = {
val cntx = new DefaultSignalling with AtomicIndexFlag
cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, parallelIterator assign cntx) mapResult {
+ executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult {
p => (p._1.result, p._2.result)
})
}
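For reference, the user-facing behaviour these prefix-based methods implement (standard API, not part of the patch); the index flag set above is what lets later partitions stop early once the predicate fails:

    val (prefix, rest) = (1 to 10).par.span(_ < 5)       // elements 1..4, then 5..10
    val firstFew       = (1 to 10).par.takeWhile(_ < 5)  // elements 1..4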
@@ -701,42 +732,51 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return a collection composed of all the elements after the longest prefix of elements
* in this $coll that satisfy the predicate `pred`
*/
- override def dropWhile(pred: T => Boolean): Repr = {
+ def dropWhile(pred: T => Boolean): Repr = {
val cntx = new DefaultSignalling with AtomicIndexFlag
cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, parallelIterator assign cntx) mapResult { _._2.result })
+ executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult { _._2.result })
}
- override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) {
- executeAndWaitResult(new CopyToArray(start, len, xs, parallelIterator))
+ def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0)
+
+ def copyToArray[U >: T](xs: Array[U], start: Int) = copyToArray(xs, start, xs.length - start)
+
+ def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) {
+ executeAndWaitResult(new CopyToArray(start, len, xs, splitter))
}
- override def zip[U >: T, S, That](that: Iterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
+ def sameElements[U >: T](that: GenIterable[U]) = seq.sameElements(that)
+
+ def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
val pbf = bf.asParallel
val thatseq = that.asParSeq
- executeAndWaitResult(new Zip(pbf, parallelIterator, thatseq.parallelIterator) mapResult { _.result });
- } else super.zip(that)(bf)
+ executeAndWaitResult(new Zip(pbf, splitter, thatseq.splitter) mapResult { _.result });
+ } else seq.zip(that)(bf2seq(bf))
- override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
+ def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
- override def zipAll[S, U >: T, That](that: Iterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
+ def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
val pbf = bf.asParallel
val thatseq = that.asParSeq
- executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, pbf, parallelIterator, thatseq.parallelIterator) mapResult { _.result });
- } else super.zipAll(that, thisElem, thatElem)(bf)
+ executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, pbf, splitter, thatseq.splitter) mapResult { _.result });
+ } else seq.zipAll(that, thisElem, thatElem)(bf2seq(bf))
protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = {
- executeAndWaitResult(new ToParCollection(cbf, parallelIterator) mapResult { _.result });
+ executeAndWaitResult(new ToParCollection(cbf, splitter) mapResult { _.result });
}
protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = {
- executeAndWaitResult(new ToParMap(cbf, parallelIterator)(ev) mapResult { _.result })
+ executeAndWaitResult(new ToParMap(cbf, splitter)(ev) mapResult { _.result })
}
- override def view = new ParIterableView[T, Repr, Sequential] {
+ def view = new ParIterableView[T, Repr, Sequential] {
protected lazy val underlying = self.repr
+ protected[this] def viewIdentifier = ""
+ protected[this] def viewIdString = ""
override def seq = self.seq.view
- def parallelIterator = self.parallelIterator
+ def splitter = self.splitter
+ def size = splitter.remaining
}
override def toArray[U >: T: ClassManifest]: Array[U] = {
@@ -751,13 +791,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toStream: Stream[T] = seq.toStream
- override def toIterator: Iterator[T] = parallelIterator
+ override def toIterator: Iterator[T] = splitter
// the methods below are overridden
override def toBuffer[U >: T]: collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
- override def toTraversable: Traversable[T] = this.asInstanceOf[Traversable[T]] // TODO add ParTraversable[T]
+ override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] // TODO add ParTraversable[T]
override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]]
@@ -782,8 +822,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
*/
protected trait Accessor[R, Tp]
extends StrictSplitterCheckTask[R, Tp] {
- protected[this] val pit: ParIterableIterator[T]
- protected[this] def newSubtask(p: ParIterableIterator[T]): Accessor[R, Tp]
+ protected[this] val pit: IterableSplitter[T]
+ protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp]
def shouldSplitFurther = pit.remaining > threshold(size, parallelismLevel)
def split = pit.split.map(newSubtask(_)) // default split procedure
private[parallel] override def signalAbort = pit.abort
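The `signalAbort` hook above is what the short-circuiting accessors (`Forall`, `Exists`, `Find`) use to stop sibling workers once the answer is known. Seen from user code (standard API, not part of the patch):

    // Workers abort the remaining splitters as soon as one of them finds a match.
    val found = (1 to 1000000).par.exists(_ == 5)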
@@ -851,102 +891,102 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected trait Transformer[R, Tp] extends Accessor[R, Tp]
- protected[this] class Foreach[S](op: T => S, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Unit, Foreach[S]] {
+ protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) extends Accessor[Unit, Foreach[S]] {
@volatile var result: Unit = ()
def leaf(prevr: Option[Unit]) = pit.foreach(op)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Foreach[S](op, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Foreach[S](op, p)
}
- protected[this] class Count(pred: T => Boolean, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Int, Count] {
+ protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Int, Count] {
// val pittxt = pit.toString
@volatile var result: Int = 0
def leaf(prevr: Option[Int]) = result = pit.count(pred)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Count(pred, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Count(pred, p)
override def merge(that: Count) = result = result + that.result
// override def toString = "CountTask(" + pittxt + ")"
}
- protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Option[U], Reduce[U]] {
+ protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Reduce[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.reduce(op))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Reduce(op, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Reduce(op, p)
override def merge(that: Reduce[U]) =
if (this.result == None) result = that.result
else if (that.result != None) result = Some(op(result.get, that.result.get))
override def requiresStrictSplitters = true
}
- protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: ParIterableIterator[T]) extends Accessor[U, Fold[U]] {
+ protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Fold[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.fold(z)(op)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Fold(z, op, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Fold(z, op, p)
override def merge(that: Fold[U]) = result = op(result, that.result)
}
- protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
extends Accessor[S, Aggregate[S]] {
@volatile var result: S = null.asInstanceOf[S]
def leaf(prevr: Option[S]) = result = pit.foldLeft(z)(seqop)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Aggregate(z, seqop, combop, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Aggregate(z, seqop, combop, p)
override def merge(that: Aggregate[S]) = result = combop(result, that.result)
}
- protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: ParIterableIterator[T]) extends Accessor[U, Sum[U]] {
+ protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Sum[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.sum(num)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Sum(num, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Sum(num, p)
override def merge(that: Sum[U]) = result = num.plus(result, that.result)
}
- protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: ParIterableIterator[T]) extends Accessor[U, Product[U]] {
+ protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Product[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.product(num)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Product(num, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Product(num, p)
override def merge(that: Product[U]) = result = num.times(result, that.result)
}
- protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: ParIterableIterator[T]) extends Accessor[Option[U], Min[U]] {
+ protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Min[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.min(ord))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Min(ord, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Min(ord, p)
override def merge(that: Min[U]) =
if (this.result == None) result = that.result
else if (that.result != None) result = if (ord.lteq(result.get, that.result.get)) result else that.result
override def requiresStrictSplitters = true
}
- protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: ParIterableIterator[T]) extends Accessor[Option[U], Max[U]] {
+ protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Max[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.max(ord))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Max(ord, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Max(ord, p)
override def merge(that: Max[U]) =
if (this.result == None) result = that.result
else if (that.result != None) result = if (ord.gteq(result.get, that.result.get)) result else that.result
override def requiresStrictSplitters = true
}
- protected[this] class Map[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Map[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Map[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, pbf(self.repr)))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Map(f, pbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, pbf, p)
override def merge(that: Map[S, That]) = result = result combine that.result
}
protected[this] class Collect[S, That]
- (pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: ParIterableIterator[T])
+ (pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Collect[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf(self.repr))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Collect(pf, pbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Collect(pf, pbf, p)
override def merge(that: Collect[S, That]) = result = result combine that.result
}
- protected[this] class FlatMap[S, That](f: T => TraversableOnce[S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class FlatMap[S, That](f: T => GenTraversableOnce[S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], FlatMap[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf(self.repr))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new FlatMap(f, pbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new FlatMap(f, pbf, p)
override def merge(that: FlatMap[S, That]) = {
//debuglog("merging " + result + " and " + that.result)
result = result combine that.result
@@ -954,67 +994,67 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Forall(pred: T => Boolean, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Boolean, Forall] {
+ protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Forall] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Forall(pred, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p)
override def merge(that: Forall) = result = result && that.result
}
- protected[this] class Exists(pred: T => Boolean, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Boolean, Exists] {
+ protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Exists] {
@volatile var result: Boolean = false
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Exists(pred, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p)
override def merge(that: Exists) = result = result || that.result
}
- protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Option[U], Find[U]] {
+ protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Find[U]] {
@volatile var result: Option[U] = None
def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Find(pred, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p)
override def merge(that: Find[U]) = if (this.result == None) result = that.result
}
- protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Filter[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
result = pit.filter2combiner(pred, reuse(prev, cbf()))
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Filter(pred, cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Filter(pred, cbf, p)
override def merge(that: Filter[U, This]) = result = result combine that.result
}
- protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], FilterNot[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
result = pit.filterNot2combiner(pred, reuse(prev, cbf()))
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new FilterNot(pred, cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new FilterNot(pred, cbf, p)
override def merge(that: FilterNot[U, This]) = result = result combine that.result
}
- protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], protected[this] val pit: ParIterableIterator[T])
+ protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], Copy[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Copy[U, That](cfactory, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Copy[U, That](cfactory, p)
override def merge(that: Copy[U, That]) = result = result combine that.result
}
- protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Partition(pred, cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbf, p)
override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
}
protected[this] class GroupBy[K, U >: T](
f: U => K,
mcf: () => HashMapCombiner[K, U],
- protected[this] val pit: ParIterableIterator[T]
+ protected[this] val pit: IterableSplitter[T]
) extends Transformer[HashMapCombiner[K, U], GroupBy[K, U]] {
@volatile var result: Result = null
final def leaf(prev: Option[Result]) = {
@@ -1026,7 +1066,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
result = cb
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new GroupBy(f, mcf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new GroupBy(f, mcf, p)
override def merge(that: GroupBy[K, U]) = {
// note: this works because we know that a HashMapCombiner doesn't merge same keys until evaluation
// --> we know we're not dropping any mappings
@@ -1034,13 +1074,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Take[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
result = pit.take2combiner(n, reuse(prev, cbf()))
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
val sizes = pits.scanLeft(0)(_ + _.remaining)
@@ -1053,11 +1093,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Drop[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
val sizes = pits.scanLeft(0)(_ + _.remaining)
@@ -1070,11 +1110,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Slice[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
val sizes = pits.scanLeft(0)(_ + _.remaining)
@@ -1088,11 +1128,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
val sizes = pits.scanLeft(0)(_ + _.remaining)
@@ -1103,14 +1143,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class TakeWhile[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] {
@volatile var result: (Combiner[U, This], Boolean) = null
def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) {
result = pit.takeWhile2combiner(pred, reuse(prev.map(_._1), cbf()))
if (!result._2) pit.setIndexFlagIfLesser(pos)
} else result = (reuse(prev.map(_._1), cbf()), false)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p)
@@ -1122,20 +1162,20 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class Span[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) {
// val lst = pit.toList
// val pa = mutable.ParArray(lst: _*)
- // val str = "At leaf we will iterate: " + pa.parallelIterator.toList
+ // val str = "At leaf we will iterate: " + pa.splitter.toList
result = pit.span2combiners(pred, cbf(), cbf()) // do NOT reuse old combiners here, lest ye be surprised
// println("\nAt leaf result is: " + result)
if (result._2.size > 0) pit.setIndexFlagIfLesser(pos)
} else {
result = (reuse(prev.map(_._2), cbf()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbf())))
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbf, p)
@@ -1148,11 +1188,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Zip[U >: T, S, That](pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: ParIterableIterator[T], val othpit: ParSeqIterator[S])
+ protected[this] class Zip[U >: T, S, That](pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf(self.repr))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
val pits = pit.split
val sizes = pits.map(_.remaining)
@@ -1164,11 +1204,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class ZipAll[U >: T, S, That]
- (len: Int, thiselem: U, thatelem: S, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: ParIterableIterator[T], val othpit: ParSeqIterator[S])
+ (len: Int, thiselem: U, thatelem: S, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf(self.repr))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = if (pit.remaining <= len) {
val pits = pit.split
val sizes = pits.map(_.remaining)
@@ -1179,18 +1219,18 @@ self: ParIterableLike[T, Repr, Sequential] =>
val diff = len - pit.remaining
Seq(
new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed
- new ZipAll(diff, thiselem, thatelem, pbf, immutable.repetition(thiselem, diff).parallelIterator.asInstanceOf[ParIterableIterator[T]], opits(1))
+ new ZipAll(diff, thiselem, thatelem, pbf, immutable.repetition(thiselem, diff).splitter.asInstanceOf[IterableSplitter[T]], opits(1))
)
}
override def merge(that: ZipAll[U, S, That]) = result = result combine that.result
override def requiresStrictSplitters = true
}
- protected[this] class CopyToArray[U >: T, This >: Repr](from: Int, len: Int, array: Array[U], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class CopyToArray[U >: T, This >: Repr](from: Int, len: Int, array: Array[U], protected[this] val pit: IterableSplitter[T])
extends Accessor[Unit, CopyToArray[U, This]] {
@volatile var result: Unit = ()
def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
val pits = pit.split
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
@@ -1201,29 +1241,29 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class ToParCollection[U >: T, That](cbf: () => Combiner[U, That], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class ToParCollection[U >: T, That](cbf: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], ToParCollection[U, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[U, That]]) {
result = cbf()
while (pit.hasNext) result += pit.next
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new ToParCollection[U, That](cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new ToParCollection[U, That](cbf, p)
override def merge(that: ToParCollection[U, That]) = result = result combine that.result
}
- protected[this] class ToParMap[K, V, That](cbf: () => Combiner[(K, V), That], protected[this] val pit: ParIterableIterator[T])(implicit ev: T <:< (K, V))
+ protected[this] class ToParMap[K, V, That](cbf: () => Combiner[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V))
extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[(K, V), That]]) {
result = cbf()
while (pit.hasNext) result += pit.next
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new ToParMap[K, V, That](cbf, p)(ev)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new ToParMap[K, V, That](cbf, p)(ev)
override def merge(that: ToParMap[K, V, That]) = result = result combine that.result
}
- protected[this] class CreateScanTree[U >: T](from: Int, len: Int, z: U, op: (U, U) => U, protected[this] val pit: ParIterableIterator[T])
+ protected[this] class CreateScanTree[U >: T](from: Int, len: Int, z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
extends Transformer[ScanTree[U], CreateScanTree[U]] {
@volatile var result: ScanTree[U] = null
def leaf(prev: Option[ScanTree[U]]) = if (pit.remaining > 0) {
@@ -1247,7 +1287,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val half = howmany / 2
ScanNode(mergeTrees(trees, from, half), mergeTrees(trees, from + half, howmany - half))
} else trees(from)
- protected[this] def newSubtask(pit: ParIterableIterator[T]) = unsupported
+ protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported
override def split = {
val pits = pit.split
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield {
@@ -1325,7 +1365,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] case class ScanLeaf[U >: T]
- (pit: ParIterableIterator[U], op: (U, U) => U, from: Int, len: Int, var prev: Option[ScanLeaf[U]], var acc: U)
+ (pit: IterableSplitter[U], op: (U, U) => U, from: Int, len: Int, var prev: Option[ScanLeaf[U]], var acc: U)
extends ScanTree[U] {
def beginsAt = from
def pushdown(v: U) = {
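Taken together, the `ParIterableLike` changes above replace direct `IterableLike` inheritance with `GenIterableLike` and route non-parallel builder cases through the sequential counterpart (`seq`, `bf2seq`). The round trip this relies on, sketched with the standard conversions (not part of the patch):

    // .par and .seq convert between the parallel and sequential hierarchies;
    // transformations keep a parallel result when the builder factory is parallel.
    val pv = Vector(1, 2, 3).par   // a parallel vector
    val sv = pv.seq                // back to Vector(1, 2, 3)
    val m  = pv.map(_ * 2)         // stays parallel: elements 2, 4, 6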
diff --git a/src/library/scala/collection/parallel/ParIterableView.scala b/src/library/scala/collection/parallel/ParIterableView.scala
index f8e0f9909c..50f0e4f869 100644
--- a/src/library/scala/collection/parallel/ParIterableView.scala
+++ b/src/library/scala/collection/parallel/ParIterableView.scala
@@ -14,6 +14,7 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.IterableView
+import scala.collection.GenIterableView
import scala.collection.generic.CanCombineFrom
@@ -29,13 +30,14 @@ import scala.collection.generic.CanCombineFrom
*/
trait ParIterableView[+T, +Coll <: Parallel, +CollSeq]
extends ParIterableViewLike[T, Coll, CollSeq, ParIterableView[T, Coll, CollSeq], IterableView[T, CollSeq]]
+ with GenIterableView[T, Coll]
object ParIterableView {
abstract class NoCombiner[T] extends Combiner[T, Nothing] {
- self: EnvironmentPassingCombiner[T, Nothing] =>
+// self: EnvironmentPassingCombiner[T, Nothing] =>
def +=(elem: T): this.type = this
def iterator: Iterator[T] = Iterator.empty
def result() = throw new UnsupportedOperationException("ParIterableView.Combiner.result")
@@ -49,8 +51,8 @@ object ParIterableView {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterableView[T, ParIterable[T], Iterable[T]]] =
new CanCombineFrom[Coll, T, ParIterableView[T, ParIterable[T], Iterable[T]]] {
- def apply(from: Coll) = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
- def apply() = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
+ def apply(from: Coll) = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
+ def apply() = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
}
}
@@ -63,3 +65,4 @@ object ParIterableView {
+
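The `NoCombiner` change above drops the `EnvironmentPassingCombiner` self-type, which is why the anonymous instantiations now need an empty body. The idiom in isolation (generic Scala, not library code):

    // An abstract class with no remaining abstract members can still only be
    // instantiated through an anonymous subclass, hence the empty refinement body.
    abstract class NoOp[T] { def +=(x: T): this.type = this }
    val n = new NoOp[Int] {}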
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 41ba0624bb..1d7659922c 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -10,10 +10,17 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.{ IterableView, IterableViewLike }
+import scala.collection.{ GenIterableView, GenIterableViewLike }
+import scala.collection.GenTraversableOnce
+import scala.collection.GenTraversable
+import scala.collection.GenIterable
+import scala.collection.GenSeq
import scala.collection.generic.{ CanBuildFrom, SliceInterval }
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.immutable.ParRange
+
+
 /** A template view of a non-strict view of a parallel iterable collection.
*
* '''Note:''' Regular view traits have type parameters used to carry information
@@ -34,58 +41,64 @@ trait ParIterableViewLike[+T,
+CollSeq,
+This <: ParIterableView[T, Coll, CollSeq] with ParIterableViewLike[T, Coll, CollSeq, This, ThisSeq],
+ThisSeq <: IterableView[T, CollSeq] with IterableViewLike[T, CollSeq, ThisSeq]]
-extends IterableView[T, Coll]
- with IterableViewLike[T, Coll, This]
+extends GenIterableView[T, Coll]
+ with GenIterableViewLike[T, Coll, This]
with ParIterable[T]
with ParIterableLike[T, This, ThisSeq]
{
self =>
import tasksupport._
+ override def foreach[U](f: T => U): Unit = super[ParIterableLike].foreach(f)
override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner");
+ protected[this] def viewIdentifier: String
+ protected[this] def viewIdString: String
+
+ protected def underlying: Coll
/* wrappers */
trait Transformed[+S] extends ParIterableView[S, Coll, CollSeq] with super.Transformed[S] {
- override def parallelIterator: ParIterableIterator[S]
- override def iterator = parallelIterator
+ override def splitter: IterableSplitter[S]
+ override def iterator = splitter
+ def size = splitter.remaining
}
trait Sliced extends super.Sliced with Transformed[T] {
// override def slice(from1: Int, until1: Int): This = newSliced(from1 max 0, until1 max 0).asInstanceOf[This]
- def parallelIterator: ParIterableIterator[T] = self.parallelIterator.slice(from, until)
+ def splitter: IterableSplitter[T] = self.splitter.slice(from, until)
override def seq = self.seq.slice(from, until)
}
trait Mapped[S] extends super.Mapped[S] with Transformed[S]{
- def parallelIterator: ParIterableIterator[S] = self.parallelIterator.map(mapping)
+ def splitter: IterableSplitter[S] = self.splitter.map(mapping)
override def seq = self.seq.map(mapping).asInstanceOf[IterableView[S, CollSeq]]
}
// only use if other is a ParIterable, otherwise force
trait Appended[U >: T] extends super.Appended[U] with Transformed[U] {
def restPar: ParIterable[U] = rest.asParIterable
- def parallelIterator: ParIterableIterator[U] = self.parallelIterator.appendParIterable[U, ParIterableIterator[U]](restPar.parallelIterator)
+ def splitter: IterableSplitter[U] = self.splitter.appendParIterable[U, IterableSplitter[U]](restPar.splitter)
override def seq = self.seq.++(rest).asInstanceOf[IterableView[U, CollSeq]]
}
trait Forced[S] extends super.Forced[S] with Transformed[S] {
def forcedPar: ParIterable[S] = forced.asParIterable
- def parallelIterator: ParIterableIterator[S] = forcedPar.parallelIterator
+ def splitter: IterableSplitter[S] = forcedPar.splitter
override def seq = forcedPar.seq.view.asInstanceOf[IterableView[S, CollSeq]]
}
// only use if other is a ParSeq, otherwise force
trait Zipped[S] extends super.Zipped[S] with Transformed[(T, S)] {
def otherPar: ParSeq[S] = other.asParSeq
- def parallelIterator: ParIterableIterator[(T, S)] = self.parallelIterator zipParSeq otherPar.parallelIterator
+ def splitter: IterableSplitter[(T, S)] = self.splitter zipParSeq otherPar.splitter
override def seq = (self.seq zip other).asInstanceOf[IterableView[(T, S), CollSeq]]
}
// only use if other is a ParSeq, otherwise force
trait ZippedAll[U >: T, S] extends super.ZippedAll[U, S] with Transformed[(U, S)] {
def otherPar: ParSeq[S] = other.asParSeq
- def parallelIterator: ParIterableIterator[(U, S)] = self.parallelIterator.zipAllParSeq(otherPar.parallelIterator, thisElem, thatElem)
+ def splitter: IterableSplitter[(U, S)] = self.splitter.zipAllParSeq(otherPar.splitter, thisElem, thatElem)
override def seq = (self.seq.zipAll(other, thisElem, thatElem)).asInstanceOf[IterableView[(U, S), CollSeq]]
}
@@ -95,11 +108,11 @@ self =>
/* operation overrides */
override def take(n: Int): This = newSliced(SliceInterval(0, n))
- override def drop(n: Int): This = newSliced(SliceInterval(n, parallelIterator.remaining))
+ override def drop(n: Int): This = newSliced(SliceInterval(n, splitter.remaining))
override def splitAt(n: Int): (This, This) = (take(n), drop(n))
override def slice(from: Int, until: Int): This = newSliced(SliceInterval(from, until))
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That]
- override def ++[U >: T, That](xs: TraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppendedTryParIterable(xs.toTraversable).asInstanceOf[That]
+ override def ++[U >: T, That](xs: GenTraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppendedTryParIterable(xs.toTraversable).asInstanceOf[That]
override def filter(p: T => Boolean): This = newForced(thisParSeq.filter(p)).asInstanceOf[This]
override def filterNot(p: T => Boolean): This = newForced(thisParSeq.filterNot(p)).asInstanceOf[This]
@@ -113,16 +126,16 @@ self =>
val (pref, suff) = thisParSeq.span(p)
(newForced(pref).asInstanceOf[This], newForced(suff).asInstanceOf[This])
}
- override def flatMap[S, That](f: T => TraversableOnce[S])(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.flatMap(f)).asInstanceOf[That]
+ override def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.flatMap(f)).asInstanceOf[That]
- override def zip[U >: T, S, That](that: Iterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
+ override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
- newZipped(ParRange(0, parallelIterator.remaining, 1, false)).asInstanceOf[That]
- override def zipAll[S, U >: T, That](that: Iterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[This, (U, S), That]): That =
+ newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
+ override def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[This, (U, S), That]): That =
newZippedAllTryParSeq(that, thisElem, thatElem).asInstanceOf[That]
override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
- executeAndWaitResult(new Force(pbf, parallelIterator).mapResult(_.result).asInstanceOf[Task[That, ResultMapping[_, Force[U, That], That]]])
+ executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, ResultMapping[_, Force[U, That], That]]])
} otherwise {
val b = bf(underlying)
b ++= this.iterator
@@ -131,16 +144,16 @@ self =>
/* wrapper virtual ctors */
- protected override def newSliced(_endpoints: SliceInterval): Transformed[T] = new { val endpoints = _endpoints } with Sliced
- protected override def newMapped[S](f: T => S): Transformed[S] = new Mapped[S] { val mapping = f }
- protected override def newForced[S](xs: => Seq[S]): Transformed[S] = new Forced[S] { val forced = xs }
- protected override def newAppended[U >: T](that: Traversable[U]): Transformed[U] = new Appended[U] { val rest = that }
- protected override def newDroppedWhile(p: T => Boolean) = unsupported
- protected override def newTakenWhile(p: T => Boolean) = unsupported
- protected override def newFlatMapped[S](f: T => TraversableOnce[S]) = unsupported
- protected override def newFiltered(p: T => Boolean) = unsupported
- protected override def newZipped[S](that: Iterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
- protected override def newZippedAll[U >: T, S](that: Iterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
+ protected def newSliced(_endpoints: SliceInterval): Transformed[T] = new { val endpoints = _endpoints } with Sliced
+ protected def newMapped[S](f: T => S): Transformed[S] = new Mapped[S] { val mapping = f }
+ protected def newForced[S](xs: => GenSeq[S]): Transformed[S] = new Forced[S] { val forced = xs }
+ protected def newAppended[U >: T](that: GenTraversable[U]): Transformed[U] = new Appended[U] { val rest = that }
+ protected def newDroppedWhile(p: T => Boolean) = unsupported
+ protected def newTakenWhile(p: T => Boolean) = unsupported
+ protected def newFlatMapped[S](f: T => GenTraversableOnce[S]) = unsupported
+ protected def newFiltered(p: T => Boolean) = unsupported
+ protected def newZipped[S](that: GenIterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
+ protected def newZippedAll[U >: T, S](that: GenIterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
val other = that
val thisElem = _thisElem
val thatElem = _thatElem
@@ -148,31 +161,31 @@ self =>
/* argument sequence dependent ctors */
- protected def newForcedTryParIterable[S](xs: => Seq[S]): Transformed[S] = {
+ protected def newForcedTryParIterable[S](xs: => GenSeq[S]): Transformed[S] = {
if (xs.isParIterable) newForced[S](xs)
else newForced(mutable.ParArray.fromTraversables(xs))
}
- protected def newAppendedTryParIterable[U >: T](that: Traversable[U]): Transformed[U] = {
+ protected def newAppendedTryParIterable[U >: T](that: GenTraversable[U]): Transformed[U] = {
// we only append if `that` is a parallel iterable, i.e. it has a splitter
if (that.isParIterable) newAppended(that)
else newAppended(mutable.ParArray.fromTraversables(that))
}
- protected def newZippedTryParSeq[S](that: Iterable[S]): Transformed[(T, S)] = {
+ protected def newZippedTryParSeq[S](that: GenIterable[S]): Transformed[(T, S)] = {
if (that.isParSeq) newZipped[S](that)
else newZipped[S](mutable.ParArray.fromTraversables(that))
}
- protected def newZippedAllTryParSeq[S, U >: T](that: Iterable[S], thisElem: U, thatElem: S): Transformed[(U, S)] = {
+ protected def newZippedAllTryParSeq[S, U >: T](that: GenIterable[S], thisElem: U, thatElem: S): Transformed[(U, S)] = {
if (that.isParSeq) newZippedAll(that, thisElem, thatElem)
else newZippedAll(mutable.ParArray.fromTraversables(that), thisElem, thatElem)
}
/* tasks */
- protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], Force[U, That]] {
var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cbf(self.underlying)))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Force(cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Force(cbf, p)
override def merge(that: Force[U, That]) = result = result combine that.result
}
@@ -187,3 +200,4 @@ self =>
+
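The change above switches ParIterableViewLike from the sequential view traits to their Gen* counterparts and renames `parallelIterator` to `splitter` throughout the Transformed wrappers, keeping the view non-strict: Mapped, Sliced, Appended and friends only record the transformation and derive a splitter over it. A minimal sketch of the user-visible laziness, assuming a 2.9-era standard library where parallel collections and their views ship with scala-library:

  // Nothing is computed when the view is built; the Mapped wrapper only
  // stores the function and derives its splitter from the underlying one.
  val v = (1 to 100).par.view.map(_ * 2)
  // Converting back to a sequential view and taking a prefix evaluates only that prefix.
  println(v.seq.take(5).toList)   // List(2, 4, 6, 8, 10)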
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index e6763ef83c..c696099007 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -14,6 +14,7 @@ package scala.collection.parallel
import scala.collection.Map
+import scala.collection.GenMap
import scala.collection.mutable.Builder
import scala.collection.generic.ParMapFactory
import scala.collection.generic.GenericParMapTemplate
@@ -35,7 +36,7 @@ import scala.collection.generic.CanCombineFrom
* @since 2.9
*/
trait ParMap[K, +V]
-extends Map[K, V]
+extends GenMap[K, V]
with GenericParMapTemplate[K, V, ParMap]
with ParIterable[(K, V)]
with ParMapLike[K, V, ParMap[K, V], Map[K, V]]
@@ -44,7 +45,9 @@ self =>
def mapCompanion: GenericParMapCompanion[ParMap] = ParMap
- override def empty: ParMap[K, V] = new mutable.ParHashMap[K, V]
+ //protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
+
+ def empty: ParMap[K, V] = new mutable.ParHashMap[K, V]
override def stringPrefix = "ParMap"
}
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 1cd5ad02d7..5b855468c4 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -13,6 +13,7 @@ package scala.collection.parallel
import scala.collection.MapLike
+import scala.collection.GenMapLike
import scala.collection.Map
import scala.collection.mutable.Builder
@@ -38,15 +39,19 @@ trait ParMapLike[K,
+V,
+Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V],
+Sequential <: Map[K, V] with MapLike[K, V, Sequential]]
-extends MapLike[K, V, Repr]
+extends GenMapLike[K, V, Repr]
with ParIterableLike[(K, V), Repr, Sequential]
-{ self =>
+{
+self =>
- protected[this] override def newBuilder: Builder[(K, V), Repr] = newCombiner
+ def default(key: K): V = throw new NoSuchElementException("key not found: " + key)
- protected[this] override def newCombiner: Combiner[(K, V), Repr] = unsupportedop("Must implement `newCombiner` in concrete collections.")
+ def empty: Repr
- override def empty: Repr
+ def apply(key: K) = get(key) match {
+ case Some(v) => v
+ case None => default(key)
+ }
// note - should not override toMap (could be mutable)
}
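ParMapLike now extends GenMapLike and gains its own `default` and `apply`, with `apply` delegating to `get` and falling back to `default`, which throws NoSuchElementException. A small usage sketch, assuming a 2.9-era library where `.par` on a sequential Map yields a parallel one:

  val m = Map("a" -> 1, "b" -> 2).par   // a parallel.immutable.ParMap
  println(m("a"))       // 1, via get("a") returning Some(1)
  println(m.get("c"))   // None
  // m("c") would go through default("c") and throw
  // NoSuchElementException("key not found: c")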
diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala
index 89e77b24d7..eefd0a727b 100644
--- a/src/library/scala/collection/parallel/ParSeq.scala
+++ b/src/library/scala/collection/parallel/ParSeq.scala
@@ -16,6 +16,7 @@ import scala.collection.generic.GenericParCompanion
import scala.collection.generic.GenericParTemplate
import scala.collection.generic.ParFactory
import scala.collection.generic.CanCombineFrom
+import scala.collection.GenSeq
import scala.collection.parallel.mutable.ParArrayCombiner
import scala.collection.parallel.mutable.ParArray
@@ -31,15 +32,19 @@ import scala.collection.parallel.mutable.ParArray
*
* @author Aleksandar Prokopec
*/
-trait ParSeq[+T] extends Seq[T]
+trait ParSeq[+T] extends GenSeq[T]
with ParIterable[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], Seq[T]] {
+ with ParSeqLike[T, ParSeq[T], Seq[T]]
+{
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
+ //protected[this] override def newBuilder = ParSeq.newBuilder[T]
def apply(i: Int): T
override def toString = super[ParIterable].toString
+
+ override def stringPrefix = getClass.getSimpleName
}
@@ -49,10 +54,8 @@ object ParSeq extends ParFactory[ParSeq] {
def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
-}
-
-
+}
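ParSeq now derives from GenSeq and reports its runtime class name as `stringPrefix`, while the companion keeps building results through `ParArrayCombiner`. A quick sketch of what that means for a user, assuming a 2.9-era library:

  import scala.collection.parallel.ParSeq
  val ps = ParSeq(1, 2, 3)   // assembled via ParArrayCombiner
  println(ps)                // typically prints ParArray(1, 2, 3)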
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 189db237b9..c9e6b45bd6 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -6,12 +6,14 @@
** |/ **
\* */
-
package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.SeqLike
+import scala.collection.GenSeqLike
+import scala.collection.GenSeq
+import scala.collection.GenIterable
import scala.collection.generic.DefaultSignalling
import scala.collection.generic.AtomicIndexFlag
import scala.collection.generic.CanBuildFrom
@@ -19,8 +21,6 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.VolatileAbort
-
-
/** A template trait for sequences of type `ParSeq[T]`, representing
* parallel sequences with element type `T`.
*
@@ -46,12 +46,12 @@ import scala.collection.generic.VolatileAbort
* @since 2.9
*/
trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, Sequential]]
-extends scala.collection.SeqLike[T, Repr]
+extends scala.collection.GenSeqLike[T, Repr]
with ParIterableLike[T, Repr, Sequential] {
self =>
import tasksupport._
- type SuperParIterator = ParIterableIterator[T]
+ type SuperParIterator = IterableSplitter[T]
/** An iterator that can be split into arbitrary subsets of iterators.
* The self-type requirement ensures that the signal context passing behaviour gets mixed in
@@ -60,14 +60,14 @@ self =>
* '''Note:''' In concrete collection classes, collection implementers might want to override the iterator
* `reverse2builder` method to ensure higher efficiency.
*/
- trait ParIterator extends ParSeqIterator[T] with super.ParIterator {
+ trait ParIterator extends SeqSplitter[T] with super.ParIterator {
me: SignalContextPassingIterator[ParIterator] =>
def split: Seq[ParIterator]
def psplit(sizes: Int*): Seq[ParIterator]
}
/** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParallelIterator` ensures that this trait gets mixed into
+ * A self-type requirement in `ParIterator` ensures that this trait gets mixed into
* concrete iterators.
*/
trait SignalContextPassingIterator[+IterRepr <: ParIterator]
@@ -87,9 +87,9 @@ self =>
*
* @return an iterator that can be split into subsets of precise size
*/
- def parallelIterator: ParSeqIterator[T]
+ protected[parallel] def splitter: SeqSplitter[T]
- override def iterator: PreciseSplitter[T] = parallelIterator
+ override def iterator: PreciseSplitter[T] = splitter
override def size = length
@@ -139,15 +139,13 @@ self =>
* @return the length of the longest segment of elements starting at `from` and
* satisfying the predicate
*/
- override def segmentLength(p: T => Boolean, from: Int): Int = if (from >= length) 0 else {
+ def segmentLength(p: T => Boolean, from: Int): Int = if (from >= length) 0 else {
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new SegmentLength(p, 0, parallelIterator.psplit(realfrom, length - realfrom)(1) assign ctx))._1
+ executeAndWaitResult(new SegmentLength(p, 0, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))._1
}
- override def prefixLength(p: T => Boolean) = segmentLength(p, 0)
-
/** Finds the first element satisfying some predicate.
*
* $indexsignalling
@@ -159,21 +157,13 @@ self =>
* @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`,
* or `-1`, if none exists
*/
- override def indexWhere(p: T => Boolean, from: Int): Int = if (from >= length) -1 else {
+ def indexWhere(p: T => Boolean, from: Int): Int = if (from >= length) -1 else {
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new IndexWhere(p, realfrom, parallelIterator.psplit(realfrom, length - realfrom)(1) assign ctx))
+ executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))
}
- override def indexWhere(p: T => Boolean): Int = indexWhere(p, 0)
-
- override def findIndexOf(p: T => Boolean): Int = indexWhere(p, 0)
-
- override def indexOf[U >: T](elem: U): Int = indexOf(elem, 0)
-
- override def indexOf[U >: T](elem: U, from: Int): Int = indexWhere(elem ==, from)
-
/** Finds the last element satisfying some predicate.
*
* $indexsignalling
@@ -185,22 +175,20 @@ self =>
* @return the index `<= end` of the last element of this $coll that satisfies the predicate `p`,
* or `-1`, if none exists
*/
- override def lastIndexWhere(p: T => Boolean, end: Int): Int = if (end < 0) -1 else {
+ def lastIndexWhere(p: T => Boolean, end: Int): Int = if (end < 0) -1 else {
val until = if (end >= length) length else end + 1
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MinValue)
- executeAndWaitResult(new LastIndexWhere(p, 0, parallelIterator.psplit(until, length - until)(0) assign ctx))
+ executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplit(until, length - until)(0) assign ctx))
}
- override def reverse: Repr = {
- executeAndWaitResult(new Reverse(() => newCombiner, parallelIterator) mapResult { _.result })
+ def reverse: Repr = {
+ executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.result })
}
- override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new ReverseMap[S, That](f, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.reverseMap(f)(bf)
-
- override def startsWith[S](that: Seq[S]): Boolean = startsWith(that, 0)
+ def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result })
+ } otherwise seq.reverseMap(f)(bf2seq(bf))
/** Tests whether this $coll contains the given sequence at a given index.
*
@@ -211,22 +199,22 @@ self =>
* @param offset the starting offset for the search
* @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise
*/
- override def startsWith[S](that: Seq[S], offset: Int): Boolean = that ifParSeq { pthat =>
+ def startsWith[S](that: GenSeq[S], offset: Int): Boolean = that ifParSeq { pthat =>
if (offset < 0 || offset >= length) offset == length && pthat.length == 0
else if (pthat.length == 0) true
else if (pthat.length > length - offset) false
else {
val ctx = new DefaultSignalling with VolatileAbort
- executeAndWaitResult(new SameElements(parallelIterator.psplit(offset, pthat.length)(1) assign ctx, pthat.parallelIterator))
+ executeAndWaitResult(new SameElements(splitter.psplit(offset, pthat.length)(1) assign ctx, pthat.splitter))
}
- } otherwise super.startsWith(that, offset)
+ } otherwise seq.startsWith(that, offset)
- override def sameElements[U >: T](that: Iterable[U]): Boolean = that ifParSeq { pthat =>
+ override def sameElements[U >: T](that: GenIterable[U]): Boolean = that ifParSeq { pthat =>
val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new SameElements(parallelIterator assign ctx, pthat.parallelIterator))
- } otherwise super.sameElements(that)
+ length == pthat.length && executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter))
+ } otherwise seq.sameElements(that)
- /** Tests whether this $coll ends with the given parallel sequence
+ /** Tests whether this $coll ends with the given parallel sequence.
*
* $abortsignalling
*
@@ -234,65 +222,65 @@ self =>
* @param that the sequence to test
* @return `true` if this $coll has `that` as a suffix, `false` otherwise
*/
- override def endsWith[S](that: Seq[S]): Boolean = that ifParSeq { pthat =>
+ def endsWith[S](that: GenSeq[S]): Boolean = that ifParSeq { pthat =>
if (that.length == 0) true
else if (that.length > length) false
else {
val ctx = new DefaultSignalling with VolatileAbort
val tlen = that.length
- executeAndWaitResult(new SameElements(parallelIterator.psplit(length - tlen, tlen)(1) assign ctx, pthat.parallelIterator))
+ executeAndWaitResult(new SameElements(splitter.psplit(length - tlen, tlen)(1) assign ctx, pthat.splitter))
}
- } otherwise super.endsWith(that)
+ } otherwise seq.endsWith(that)
- override def patch[U >: T, That](from: Int, patch: Seq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+ def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
val realreplaced = replaced min (length - from)
if (patch.isParSeq && bf.isParallel && (size - realreplaced + patch.size) > MIN_FOR_COPY) {
val that = patch.asParSeq
val pbf = bf.asParallel
- val pits = parallelIterator.psplit(from, replaced, length - from - realreplaced)
+ val pits = splitter.psplit(from, replaced, length - from - realreplaced)
val copystart = new Copy[U, That](() => pbf(repr), pits(0))
val copymiddle = wrap {
- val tsk = new that.Copy[U, That](() => pbf(repr), that.parallelIterator)
+ val tsk = new that.Copy[U, That](() => pbf(repr), that.splitter)
tasksupport.executeAndWaitResult(tsk)
}
val copyend = new Copy[U, That](() => pbf(repr), pits(2))
executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult {
_.result
})
- } else patch_sequential(from, patch, replaced)
+ } else patch_sequential(from, patch.seq, replaced)
}
private def patch_sequential[U >: T, That](fromarg: Int, patch: Seq[U], r: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
val from = 0 max fromarg
val b = bf(repr)
val repl = (r min (length - from)) max 0
- val pits = parallelIterator.psplit(from, repl, length - from - repl)
+ val pits = splitter.psplit(from, repl, length - from - repl)
b ++= pits(0)
- b ++= patch.iterator
+ b ++= patch
b ++= pits(2)
b.result
}
- override def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Updated(index, elem, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.updated(index, elem)
+ def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result })
+ } otherwise seq.updated(index, elem)(bf2seq(bf))
- override def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+ def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
patch(0, mutable.ParArray(elem), 0)
}
- override def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+ def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
patch(length, mutable.ParArray(elem), 0)
}
- override def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
+ def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
patch(length, new immutable.Repetition(elem, len - length), 0)
} else patch(length, Nil, 0);
- override def zip[U >: T, S, That](that: Iterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
+ override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
val pbf = bf.asParallel
val thatseq = that.asParSeq
- executeAndWaitResult(new Zip(length min thatseq.length, pbf, parallelIterator, thatseq.parallelIterator) mapResult { _.result });
+ executeAndWaitResult(new Zip(length min thatseq.length, pbf, splitter, thatseq.splitter) mapResult { _.result });
} else super.zip(that)(bf)
/** Tests whether every element of this $coll relates to the
@@ -307,10 +295,46 @@ self =>
* `p(x, y)` is `true` for all corresponding elements `x` of this $coll
* and `y` of `that`, otherwise `false`
*/
- override def corresponds[S](that: Seq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat =>
+ def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat =>
val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new Corresponds(p, parallelIterator assign ctx, pthat.parallelIterator))
- } otherwise super.corresponds(that)(p)
+ length == pthat.length && executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter))
+ } otherwise seq.corresponds(that)(p)
+
+ def diff[U >: T](that: GenSeq[U]): Repr = sequentially {
+ _ diff that
+ }
+
+ /** Computes the multiset intersection between this $coll and another sequence.
+ * $mayNotTerminateInf
+ *
+ * @param that the sequence of elements to intersect with.
+ * @tparam U the element type of `that`, a supertype of this $coll's element type.
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ * @usecase def intersect(that: Seq[T]): $Coll[T]
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ */
+ def intersect[U >: T](that: GenSeq[U]) = sequentially {
+ _ intersect that
+ }
+
+ /** Builds a new $coll from this $coll without any duplicate elements.
+ * $willNotTerminateInf
+ *
+ * @return A new $coll which contains the first occurrence of every element of this $coll.
+ */
+ def distinct: Repr = sequentially {
+ _.distinct
+ }
override def toString = seq.mkString(stringPrefix + "(", ", ", ")")
@@ -318,25 +342,25 @@ self =>
override def view = new ParSeqView[T, Repr, Sequential] {
protected lazy val underlying = self.repr
+ protected[this] def viewIdentifier = ""
+ protected[this] def viewIdString = ""
def length = self.length
def apply(idx: Int) = self(idx)
override def seq = self.seq.view
- def parallelIterator = self.parallelIterator
+ def splitter = self.splitter
}
- override def view(from: Int, until: Int) = view.slice(from, until)
-
/* tasks */
- protected[this] def down(p: ParIterableIterator[_]) = p.asInstanceOf[ParSeqIterator[T]]
+ protected[this] def down(p: IterableSplitter[_]) = p.asInstanceOf[SeqSplitter[T]]
protected trait Accessor[R, Tp] extends super.Accessor[R, Tp] {
- protected[this] val pit: ParSeqIterator[T]
+ protected[this] val pit: SeqSplitter[T]
}
protected trait Transformer[R, Tp] extends Accessor[R, Tp] with super.Transformer[R, Tp]
- protected[this] class SegmentLength(pred: T => Boolean, from: Int, protected[this] val pit: ParSeqIterator[T])
+ protected[this] class SegmentLength(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T])
extends Accessor[(Int, Boolean), SegmentLength] {
@volatile var result: (Int, Boolean) = null
def leaf(prev: Option[(Int, Boolean)]) = if (from < pit.indexFlag) {
@@ -354,7 +378,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class IndexWhere(pred: T => Boolean, from: Int, protected[this] val pit: ParSeqIterator[T])
+ protected[this] class IndexWhere(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T])
extends Accessor[Int, IndexWhere] {
@volatile var result: Int = -1
def leaf(prev: Option[Int]) = if (from < pit.indexFlag) {
@@ -375,7 +399,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, protected[this] val pit: ParSeqIterator[T])
+ protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, protected[this] val pit: SeqSplitter[T])
extends Accessor[Int, LastIndexWhere] {
@volatile var result: Int = -1
def leaf(prev: Option[Int]) = if (pos > pit.indexFlag) {
@@ -396,7 +420,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], protected[this] val pit: ParSeqIterator[T])
+ protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[U, This], Reverse[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.reverse2combiner(reuse(prev, cbf()))
@@ -404,7 +428,7 @@ self =>
override def merge(that: Reverse[U, This]) = result = that.result combine result
}
- protected[this] class ReverseMap[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: ParSeqIterator[T])
+ protected[this] class ReverseMap[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[S, That], ReverseMap[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf(self.repr))
@@ -412,7 +436,7 @@ self =>
override def merge(that: ReverseMap[S, That]) = result = that.result combine result
}
- protected[this] class SameElements[U >: T](protected[this] val pit: ParSeqIterator[T], val otherpit: PreciseSplitter[U])
+ protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[U])
extends Accessor[Boolean, SameElements[U]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
@@ -429,7 +453,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CanCombineFrom[Repr, U, That], protected[this] val pit: ParSeqIterator[T])
+ protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CanCombineFrom[Repr, U, That], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[U, That], Updated[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf(self.repr))
@@ -442,7 +466,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class Zip[U >: T, S, That](len: Int, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: ParSeqIterator[T], val otherpit: ParSeqIterator[S])
+ protected[this] class Zip[U >: T, S, That](len: Int, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, pbf(self.repr))
@@ -460,7 +484,7 @@ self =>
override def merge(that: Zip[U, S, That]) = result = result combine that.result
}
- protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: ParSeqIterator[T], val otherpit: PreciseSplitter[S])
+ protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[S])
extends Accessor[Boolean, Corresponds[S]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
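In ParSeqLike the index-based searches (`segmentLength`, `indexWhere`, `lastIndexWhere`) are now plain definitions against GenSeqLike that run a task over `splitter` with an index flag for early abort, while `diff`, `intersect` and `distinct` simply delegate to the sequential counterpart via `sequentially`. A behavioural sketch, assuming a 2.9-era library:

  val ps = (1 to 20).par                    // a ParSeq[Int]
  println(ps.indexWhere(_ > 5))             // 5   (value 6)
  println(ps.segmentLength(_ < 4, 0))       // 3   (prefix 1, 2, 3)
  println(ps.lastIndexWhere(_ % 2 == 0))    // 19  (value 20)
  // distinct is computed on the sequential view and re-parallelised:
  println(List(1, 2, 2, 3, 1).par.distinct.seq.toList)   // List(1, 2, 3)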
diff --git a/src/library/scala/collection/parallel/ParSeqView.scala b/src/library/scala/collection/parallel/ParSeqView.scala
index 6ac24668ca..8461390839 100644
--- a/src/library/scala/collection/parallel/ParSeqView.scala
+++ b/src/library/scala/collection/parallel/ParSeqView.scala
@@ -36,7 +36,7 @@ extends ParSeqViewLike[T, Coll, CollSeq, ParSeqView[T, Coll, CollSeq], SeqView[T
object ParSeqView {
abstract class NoCombiner[T] extends Combiner[T, Nothing] {
- self: EnvironmentPassingCombiner[T, Nothing] =>
+// self: EnvironmentPassingCombiner[T, Nothing] =>
def +=(elem: T): this.type = this
def iterator: Iterator[T] = Iterator.empty
def result() = throw new UnsupportedOperationException("ParSeqView.Combiner.result")
@@ -50,8 +50,8 @@ object ParSeqView {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeqView[T, ParSeq[T], Seq[T]]] =
new CanCombineFrom[Coll, T, ParSeqView[T, ParSeq[T], Seq[T]]] {
- def apply(from: Coll) = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
- def apply() = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
+ def apply(from: Coll) = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
+ def apply() = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
}
}
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
index 3b57e2009d..b7ad227f11 100644
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala
@@ -10,6 +10,11 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.{ SeqView, SeqViewLike }
+import scala.collection.{ GenSeqView, GenSeqViewLike }
+import scala.collection.GenSeq
+import scala.collection.GenIterable
+import scala.collection.GenTraversable
+import scala.collection.GenTraversableOnce
import scala.collection.generic.{ CanBuildFrom, SliceInterval }
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.immutable.ParRange
@@ -29,8 +34,8 @@ trait ParSeqViewLike[+T,
+CollSeq,
+This <: ParSeqView[T, Coll, CollSeq] with ParSeqViewLike[T, Coll, CollSeq, This, ThisSeq],
+ThisSeq <: SeqView[T, CollSeq] with SeqViewLike[T, CollSeq, ThisSeq]]
-extends SeqView[T, Coll]
- with SeqViewLike[T, Coll, This]
+extends GenSeqView[T, Coll]
+ with GenSeqViewLike[T, Coll, This]
with ParIterableView[T, Coll, CollSeq]
with ParIterableViewLike[T, Coll, CollSeq, This, ThisSeq]
with ParSeq[T]
@@ -40,52 +45,54 @@ self =>
import tasksupport._
trait Transformed[+S] extends ParSeqView[S, Coll, CollSeq]
- with super[ParIterableView].Transformed[S] with super[SeqView].Transformed[S] {
- override def parallelIterator: ParSeqIterator[S]
- override def iterator = parallelIterator
+ with super[ParIterableView].Transformed[S] with super[GenSeqViewLike].Transformed[S] {
+ override def splitter: SeqSplitter[S]
+ override def iterator = splitter
+ override def size = length
}
- trait Sliced extends super[SeqViewLike].Sliced with super[ParIterableViewLike].Sliced with Transformed[T] {
+ trait Sliced extends super[GenSeqViewLike].Sliced with super[ParIterableViewLike].Sliced with Transformed[T] {
// override def slice(from1: Int, until1: Int): This = newSliced(from1 max 0, until1 max 0).asInstanceOf[This]
- override def parallelIterator = self.parallelIterator.psplit(from, until - from)(1)
+ override def splitter = self.splitter.psplit(from, until - from)(1)
+ override def seq = self.seq.slice(from, until)
}
- trait Mapped[S] extends super[SeqViewLike].Mapped[S] with super[ParIterableViewLike].Mapped[S] with Transformed[S] {
- override def parallelIterator = self.parallelIterator.map(mapping)
+ trait Mapped[S] extends super[GenSeqViewLike].Mapped[S] with super[ParIterableViewLike].Mapped[S] with Transformed[S] {
+ override def splitter = self.splitter.map(mapping)
override def seq = self.seq.map(mapping).asInstanceOf[SeqView[S, CollSeq]]
}
- trait Appended[U >: T] extends super[SeqViewLike].Appended[U] with super[ParIterableViewLike].Appended[U] with Transformed[U] {
+ trait Appended[U >: T] extends super[GenSeqViewLike].Appended[U] with super[ParIterableViewLike].Appended[U] with Transformed[U] {
override def restPar: ParSeq[U] = rest.asParSeq
- override def parallelIterator = self.parallelIterator.appendParSeq[U, ParSeqIterator[U]](restPar.parallelIterator)
+ override def splitter = self.splitter.appendParSeq[U, SeqSplitter[U]](restPar.splitter)
override def seq = self.seq.++(rest).asInstanceOf[SeqView[U, CollSeq]]
}
- trait Forced[S] extends super[SeqViewLike].Forced[S] with super[ParIterableViewLike].Forced[S] with Transformed[S] {
+ trait Forced[S] extends super[GenSeqViewLike].Forced[S] with super[ParIterableViewLike].Forced[S] with Transformed[S] {
override def forcedPar: ParSeq[S] = forced.asParSeq
- override def parallelIterator: ParSeqIterator[S] = forcedPar.parallelIterator
+ override def splitter: SeqSplitter[S] = forcedPar.splitter
override def seq = forcedPar.seq.view.asInstanceOf[SeqView[S, CollSeq]]
}
- trait Zipped[S] extends super[SeqViewLike].Zipped[S] with super[ParIterableViewLike].Zipped[S] with Transformed[(T, S)] {
- override def parallelIterator = self.parallelIterator zipParSeq otherPar.parallelIterator
+ trait Zipped[S] extends super[GenSeqViewLike].Zipped[S] with super[ParIterableViewLike].Zipped[S] with Transformed[(T, S)] {
+ override def splitter = self.splitter zipParSeq otherPar.splitter
override def seq = (self.seq zip other).asInstanceOf[SeqView[(T, S), CollSeq]]
}
- trait ZippedAll[U >: T, S] extends super[SeqViewLike].ZippedAll[U, S] with super[ParIterableViewLike].ZippedAll[U, S] with Transformed[(U, S)] {
- override def parallelIterator: ParSeqIterator[(U, S)] = self.parallelIterator.zipAllParSeq(otherPar.parallelIterator, thisElem, thatElem)
+ trait ZippedAll[U >: T, S] extends super[GenSeqViewLike].ZippedAll[U, S] with super[ParIterableViewLike].ZippedAll[U, S] with Transformed[(U, S)] {
+ override def splitter: SeqSplitter[(U, S)] = self.splitter.zipAllParSeq(otherPar.splitter, thisElem, thatElem)
override def seq = (self.seq.zipAll(other, thisElem, thatElem)).asInstanceOf[SeqView[(U, S), CollSeq]]
}
trait Reversed extends super.Reversed with Transformed[T] {
- override def parallelIterator: ParSeqIterator[T] = self.parallelIterator.reverse
+ override def splitter: SeqSplitter[T] = self.splitter.reverse
override def seq = self.seq.reverse.asInstanceOf[SeqView[T, CollSeq]]
}
// use only with ParSeq patches, otherwise force
trait Patched[U >: T] extends super.Patched[U] with Transformed[U] {
def patchPar: ParSeq[U] = patch.asInstanceOf[ParSeq[U]]
- override def parallelIterator: ParSeqIterator[U] = self.parallelIterator.patchParSeq[U](from, patchPar.parallelIterator, replaced)
+ override def splitter: SeqSplitter[U] = self.splitter.patchParSeq[U](from, patchPar.splitter, replaced)
override def seq = self.seq.patch(from, patch, replaced).asInstanceOf[SeqView[U, CollSeq]]
}
@@ -97,29 +104,29 @@ self =>
/* wrapper virtual ctors */
protected override def newSliced(_endpoints: SliceInterval): Transformed[T] = new { val endpoints = _endpoints } with Sliced
- protected override def newAppended[U >: T](that: Traversable[U]): Transformed[U] = {
+ protected override def newAppended[U >: T](that: GenTraversable[U]): Transformed[U] = {
// we only append if `that` is a parallel sequence, i.e. it has a precise splitter
if (that.isParSeq) new Appended[U] { val rest = that }
else newForced(mutable.ParArray.fromTraversables(this, that))
}
- protected override def newForced[S](xs: => Seq[S]): Transformed[S] = {
+ protected override def newForced[S](xs: => GenSeq[S]): Transformed[S] = {
if (xs.isParSeq) new Forced[S] { val forced = xs }
else new Forced[S] { val forced = mutable.ParArray.fromTraversables(xs) }
}
protected override def newMapped[S](f: T => S): Transformed[S] = new Mapped[S] { val mapping = f }
- protected override def newZipped[S](that: Iterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
- protected override def newZippedAll[U >: T, S](that: Iterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
+ protected override def newZipped[S](that: GenIterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
+ protected override def newZippedAll[U >: T, S](that: GenIterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
val other = that
val thisElem = _thisElem
val thatElem = _thatElem
}
- protected override def newReversed: Transformed[T] = new Reversed { }
- protected override def newPatched[U >: T](_from: Int, _patch: Seq[U], _replaced: Int): Transformed[U] = new {
+ protected def newReversed: Transformed[T] = new Reversed { }
+ protected def newPatched[U >: T](_from: Int, _patch: GenSeq[U], _replaced: Int): Transformed[U] = new {
val from = _from;
val patch = _patch;
val replaced = _replaced
} with Patched[U]
- protected override def newPrepended[U >: T](elem: U): Transformed[U] = unsupported
+ protected def newPrepended[U >: T](elem: U): Transformed[U] = unsupported
/* operation overrides */
@@ -130,15 +137,15 @@ self =>
override def splitAt(n: Int): (This, This) = (take(n), drop(n))
/* appended */
- override def ++[U >: T, That](xs: TraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppended(xs.toTraversable).asInstanceOf[That]
+ override def ++[U >: T, That](xs: GenTraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppended(xs.toTraversable).asInstanceOf[That]
override def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = ++(Iterator.single(elem))(bf)
- override def union[U >: T, That](that: Seq[U])(implicit bf: CanBuildFrom[This, U, That]): That = this ++ that
+ //override def union[U >: T, That](that: GenSeq[U])(implicit bf: CanBuildFrom[This, U, That]): That = this ++ that
/* misc */
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That]
- override def zip[U >: T, S, That](that: Iterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
+ override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
- newZipped(ParRange(0, parallelIterator.remaining, 1, false)).asInstanceOf[That]
+ newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
override def reverse: This = newReversed.asInstanceOf[This]
override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = reverse.map(f)
@@ -149,18 +156,18 @@ self =>
}
override def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = patch(length, Seq.fill(len - length)(elem), 0)
override def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = patch(0, mutable.ParArray.fromTraversables(Iterator.single(elem)), 0)
- override def patch[U >: T, That](from: Int, patch: Seq[U], replace: Int)(implicit bf: CanBuildFrom[This, U, That]): That = newPatched(from, patch, replace).asInstanceOf[That]
+ override def patch[U >: T, That](from: Int, patch: GenSeq[U], replace: Int)(implicit bf: CanBuildFrom[This, U, That]): That = newPatched(from, patch, replace).asInstanceOf[That]
/* forced */
- override def diff[U >: T](that: Seq[U]): This = newForced(thisParSeq diff that).asInstanceOf[This]
- override def intersect[U >: T](that: Seq[U]): This = newForced(thisParSeq intersect that).asInstanceOf[This]
- override def sorted[U >: T](implicit ord: Ordering[U]): This = newForced(thisParSeq sorted ord).asInstanceOf[This]
+ // override def diff[U >: T](that: GenSeq[U]): This = newForced(thisParSeq diff that).asInstanceOf[This]
+ // override def intersect[U >: T](that: GenSeq[U]): This = newForced(thisParSeq intersect that).asInstanceOf[This]
+ // override def sorted[U >: T](implicit ord: Ordering[U]): This = newForced(thisParSeq sorted ord).asInstanceOf[This]
override def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[This, S, That]): That = filter(pf.isDefinedAt).map(pf)(bf)
override def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanLeft(z)(op)).asInstanceOf[That]
override def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanRight(z)(op)).asInstanceOf[That]
override def groupBy[K](f: T => K): immutable.ParMap[K, This] = thisParSeq.groupBy(f).map(kv => (kv._1, newForced(kv._2).asInstanceOf[This]))
override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
- executeAndWaitResult(new Force(pbf, parallelIterator).mapResult(_.result).asInstanceOf[Task[That, _]])
+ executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, _]])
} otherwise {
val b = bf(underlying)
b ++= this.iterator
@@ -169,7 +176,7 @@ self =>
/* tasks */
- protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: ParSeqIterator[T])
+ protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[U, That], Force[U, That]] {
var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cbf(self.underlying)))
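ParSeqViewLike gets the same treatment as the iterable views: the seq-specific wrappers (Reversed, Patched, Zipped, ...) now expose a `splitter` instead of `parallelIterator`, and reversing or patching a view stays lazy until it is forced or converted. A small sketch, under the same 2.9-era assumption:

  // Reversed only wraps the view; its splitter is self.splitter.reverse.
  val v = (1 to 5).par.view.reverse
  println(v.seq.toList)   // List(5, 4, 3, 2, 1)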
diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala
index f3dab8faa9..151433405e 100644
--- a/src/library/scala/collection/parallel/ParSet.scala
+++ b/src/library/scala/collection/parallel/ParSet.scala
@@ -16,6 +16,7 @@ package scala.collection.parallel
import scala.collection.Set
+import scala.collection.GenSet
import scala.collection.mutable.Builder
import scala.collection.generic._
@@ -34,7 +35,7 @@ import scala.collection.generic._
* @since 2.9
*/
trait ParSet[T]
-extends Set[T]
+extends GenSet[T]
with GenericParTemplate[T, ParSet]
with ParIterable[T]
with ParSetLike[T, ParSet[T], Set[T]]
@@ -42,6 +43,8 @@ extends Set[T]
self =>
override def empty: ParSet[T] = mutable.ParHashSet[T]()
+ //protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T]
+
override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
override def stringPrefix = "ParSet"
@@ -83,19 +86,3 @@ object ParSet extends ParSetFactory[ParSet] {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index 9e769f425b..3728158c27 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -12,6 +12,8 @@ package scala.collection.parallel
import scala.collection.SetLike
+import scala.collection.GenSetLike
+import scala.collection.GenSet
import scala.collection.Set
import scala.collection.mutable.Builder
@@ -35,49 +37,23 @@ import scala.collection.mutable.Builder
trait ParSetLike[T,
+Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T],
+Sequential <: Set[T] with SetLike[T, Sequential]]
-extends SetLike[T, Repr]
+extends GenSetLike[T, Repr]
with ParIterableLike[T, Repr, Sequential]
{ self =>
- protected[this] override def newBuilder: Builder[T, Repr] = newCombiner
-
- protected[this] override def newCombiner: Combiner[T, Repr]
-
- override def empty: Repr
+ def empty: Repr
// note: should not override toSet (could be mutable)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ def union(that: GenSet[T]): Repr = sequentially {
+ _ union that
+ }
+ def diff(that: GenSet[T]): Repr = sequentially {
+ _ diff that
+ }
+}
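ParSetLike drops the builder/combiner overrides and instead implements `union` and `diff` by round-tripping through the sequential set via `sequentially`. The observable behaviour, sketched under the same 2.9-era assumption:

  val a = Set(1, 2, 3).par
  val b = Set(3, 4).par
  println((a union b).seq.toList.sorted)   // List(1, 2, 3, 4)
  println((a diff b).seq.toList.sorted)    // List(1, 2)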
diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala
new file mode 100644
index 0000000000..6a652bbeca
--- /dev/null
+++ b/src/library/scala/collection/parallel/PreciseSplitter.scala
@@ -0,0 +1,64 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.parallel
+
+
+import scala.collection.Seq
+
+
+/** A precise splitter (or a precise split iterator) can be split into an arbitrary number of splitters
+ * that traverse disjoint subsets of arbitrary sizes.
+ *
+ * Implementors might want to override the parameterless `split` method for efficiency.
+ *
+ * @tparam T type of the elements this splitter traverses
+ *
+ * @since 2.9
+ * @author Aleksandar Prokopec
+ */
+trait PreciseSplitter[+T] extends Splitter[T] {
+
+ /** Splits the splitter into disjoint views.
+ *
+ * This counterpart of the `split` method is specific to precise splitters.
+ * It returns a sequence of splitters, each iterating some subset of the
+ * elements in this splitter. The size of each subsplitter in the partition equals
+ * the size given in the corresponding argument, as long as there are enough elements
+ * in this splitter to split it that way.
+ *
+ * If there aren't enough elements, an empty splitter is returned for each remaining argument.
+ * If there are elements left over, an additional splitter holding them is appended at the end.
+ *
+ * For example, say we have a splitter `ps` with 100 elements. Invoking:
+ * {{{
+ * ps.psplit(50, 25, 25, 10, 5)
+ * }}}
+ * will return a sequence of five splitters, with the last two splitters being empty. On the other hand, calling:
+ * {{{
+ * ps.psplit(50, 40)
+ * }}}
+ * will return a sequence of three splitters, the last of them containing the remaining ten elements.
+ *
+ * '''Note:''' this method actually invalidates the current splitter.
+ *
+ * Unlike the parameterless `split` method defined on splitters, the splitters returned by this method can be empty.
+ *
+ * @param sizes the sizes used to split this split iterator into iterators that traverse disjoint subsets
+ * @return a sequence of disjoint subsequence iterators of this parallel iterator
+ */
+ def psplit(sizes: Int*): Seq[PreciseSplitter[T]]
+
+ def split: Seq[PreciseSplitter[T]]
+
+}
+
+
+
+
+
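The new PreciseSplitter trait documents the size bookkeeping of `psplit` above. The helper below is purely illustrative and not part of the library; it only reproduces the padding and compensation rule described in the comment (empty splitters for arguments that cannot be satisfied, one extra splitter for leftover elements):

  // Hypothetical helper mimicking the psplit size rule; not library code.
  def psplitSizes(total: Int, sizes: Seq[Int]): Seq[Int] = {
    val kept = sizes.scanLeft(0)(_ + _).zip(sizes).map {
      case (before, s) => 0 max (s min (total - before))
    }
    if (kept.sum < total) kept :+ (total - kept.sum) else kept
  }
  println(psplitSizes(100, Seq(50, 25, 25, 10, 5)))   // List(50, 25, 25, 0, 0)
  println(psplitSizes(100, Seq(50, 40)))              // List(50, 40, 10)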
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 508bc46a72..e04e0e9c72 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -17,6 +17,7 @@ import scala.collection.generic.DelegatedSignalling
import scala.collection.generic.CanCombineFrom
import scala.collection.mutable.Builder
import scala.collection.Iterator.empty
+import scala.collection.GenTraversableOnce
import scala.collection.parallel.immutable.repetition
@@ -45,13 +46,13 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
i
}
- def reduce[U >: T](op: (U, U) => U): U = {
+ override def reduce[U >: T](op: (U, U) => U): U = {
var r: U = next
while (hasNext) r = op(r, next)
r
}
- def fold[U >: T](z: U)(op: (U, U) => U): U = {
+ override def fold[U >: T](z: U)(op: (U, U) => U): U = {
var r = z
while (hasNext) r = op(r, next)
r
@@ -124,10 +125,10 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
cb
}
- def flatmap2combiner[S, That](f: T => TraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
+ def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
while (hasNext) {
- val traversable = f(next)
+ val traversable = f(next).seq
if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
else cb ++= traversable
}
@@ -279,7 +280,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
-trait AugmentedSeqIterator[+T] extends AugmentedIterableIterator[T] {
+private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIterator[T] {
/** The exact number of elements this iterator has yet to iterate.
* This method doesn't change the state of the iterator.
@@ -372,7 +373,7 @@ trait AugmentedSeqIterator[+T] extends AugmentedIterableIterator[T] {
*
* @param T type of the elements iterated.
*/
-trait ParIterableIterator[+T]
+trait IterableSplitter[+T]
extends AugmentedIterableIterator[T]
with Splitter[T]
with Signalling
@@ -381,9 +382,9 @@ extends AugmentedIterableIterator[T]
self =>
/** Creates a copy of this iterator. */
- def dup: ParIterableIterator[T]
+ def dup: IterableSplitter[T]
- def split: Seq[ParIterableIterator[T]]
+ def split: Seq[IterableSplitter[T]]
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator.
@@ -419,14 +420,14 @@ self =>
/* iterator transformers */
- class Taken(taken: Int) extends ParIterableIterator[T] {
+ class Taken(taken: Int) extends IterableSplitter[T] {
var signalDelegate = self.signalDelegate
var remaining = taken min self.remaining
def hasNext = remaining > 0
def next = { remaining -= 1; self.next }
- def dup: ParIterableIterator[T] = self.dup.take(taken)
- def split: Seq[ParIterableIterator[T]] = takeSeq(self.split) { (p, n) => p.take(n) }
- protected[this] def takeSeq[PI <: ParIterableIterator[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = {
+ def dup: IterableSplitter[T] = self.dup.take(taken)
+ def split: Seq[IterableSplitter[T]] = takeSeq(self.split) { (p, n) => p.take(n) }
+ protected[this] def takeSeq[PI <: IterableSplitter[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = {
val sizes = sq.scanLeft(0)(_ + _.remaining)
val shortened = for ((it, (from, until)) <- sq zip (sizes.init zip sizes.tail)) yield
if (until < remaining) it else taker(it, remaining - from)
@@ -445,23 +446,23 @@ self =>
}
it
}
- override def take(n: Int): ParIterableIterator[T] = newTaken(n)
- override def slice(from1: Int, until1: Int): ParIterableIterator[T] = newSliceInternal(newTaken(until1), from1)
+ override def take(n: Int): IterableSplitter[T] = newTaken(n)
+ override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1)
- class Mapped[S](f: T => S) extends ParIterableIterator[S] {
+ class Mapped[S](f: T => S) extends IterableSplitter[S] {
var signalDelegate = self.signalDelegate
def hasNext = self.hasNext
def next = f(self.next)
def remaining = self.remaining
- def dup: ParIterableIterator[S] = self.dup map f
- def split: Seq[ParIterableIterator[S]] = self.split.map { _ map f }
+ def dup: IterableSplitter[S] = self.dup map f
+ def split: Seq[IterableSplitter[S]] = self.split.map { _ map f }
}
override def map[S](f: T => S) = new Mapped(f)
- class Appended[U >: T, PI <: ParIterableIterator[U]](protected val that: PI) extends ParIterableIterator[U] {
+ class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] {
var signalDelegate = self.signalDelegate
- protected var curr: ParIterableIterator[U] = self
+ protected var curr: IterableSplitter[U] = self
def hasNext = if (curr.hasNext) true else if (curr eq self) {
curr = that
curr.hasNext
@@ -472,19 +473,19 @@ self =>
} else curr.next
def remaining = if (curr eq self) curr.remaining + that.remaining else curr.remaining
protected def firstNonEmpty = (curr eq self) && curr.hasNext
- def dup: ParIterableIterator[U] = self.dup.appendParIterable[U, PI](that)
- def split: Seq[ParIterableIterator[U]] = if (firstNonEmpty) Seq(curr, that) else curr.split
+ def dup: IterableSplitter[U] = self.dup.appendParIterable[U, PI](that)
+ def split: Seq[IterableSplitter[U]] = if (firstNonEmpty) Seq(curr, that) else curr.split
}
- def appendParIterable[U >: T, PI <: ParIterableIterator[U]](that: PI) = new Appended[U, PI](that)
+ def appendParIterable[U >: T, PI <: IterableSplitter[U]](that: PI) = new Appended[U, PI](that)
- class Zipped[S](protected val that: ParSeqIterator[S]) extends ParIterableIterator[(T, S)] {
+ class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] {
var signalDelegate = self.signalDelegate
def hasNext = self.hasNext && that.hasNext
def next = (self.next, that.next)
def remaining = self.remaining min that.remaining
- def dup: ParIterableIterator[(T, S)] = self.dup.zipParSeq(that)
- def split: Seq[ParIterableIterator[(T, S)]] = {
+ def dup: IterableSplitter[(T, S)] = self.dup.zipParSeq(that)
+ def split: Seq[IterableSplitter[(T, S)]] = {
val selfs = self.split
val sizes = selfs.map(_.remaining)
val thats = that.psplit(sizes: _*)
@@ -492,10 +493,10 @@ self =>
}
}
- def zipParSeq[S](that: ParSeqIterator[S]) = new Zipped(that)
+ def zipParSeq[S](that: SeqSplitter[S]) = new Zipped(that)
- class ZippedAll[U >: T, S](protected val that: ParSeqIterator[S], protected val thiselem: U, protected val thatelem: S)
- extends ParIterableIterator[(U, S)] {
+ class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S)
+ extends IterableSplitter[(U, S)] {
var signalDelegate = self.signalDelegate
def hasNext = self.hasNext || that.hasNext
def next = if (self.hasNext) {
@@ -503,18 +504,18 @@ self =>
else (self.next, thatelem)
} else (thiselem, that.next);
def remaining = self.remaining max that.remaining
- def dup: ParIterableIterator[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem)
- def split: Seq[ParIterableIterator[(U, S)]] = {
+ def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem)
+ def split: Seq[IterableSplitter[(U, S)]] = {
val selfrem = self.remaining
val thatrem = that.remaining
- val thisit = if (selfrem < thatrem) self.appendParIterable[U, ParSeqIterator[U]](repetition[U](thiselem, thatrem - selfrem).parallelIterator) else self
- val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).parallelIterator) else that
+ val thisit = if (selfrem < thatrem) self.appendParIterable[U, SeqSplitter[U]](repetition[U](thiselem, thatrem - selfrem).splitter) else self
+ val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).splitter) else that
val zipped = thisit zipParSeq thatit
zipped.split
}
}
- def zipAllParSeq[S, U >: T, R >: S](that: ParSeqIterator[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
+ def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
}
@@ -523,15 +524,15 @@ self =>
*
* @param T type of the elements iterated.
*/
-trait ParSeqIterator[+T]
-extends ParIterableIterator[T]
+trait SeqSplitter[+T]
+extends IterableSplitter[T]
with AugmentedSeqIterator[T]
with PreciseSplitter[T]
{
self =>
- def dup: ParSeqIterator[T]
- def split: Seq[ParSeqIterator[T]]
- def psplit(sizes: Int*): Seq[ParSeqIterator[T]]
+ def dup: SeqSplitter[T]
+ def split: Seq[SeqSplitter[T]]
+ def psplit(sizes: Int*): Seq[SeqSplitter[T]]
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator. Unlike the version of this method in the supertrait,
@@ -544,27 +545,27 @@ self =>
/* iterator transformers */
- class Taken(tk: Int) extends super.Taken(tk) with ParSeqIterator[T] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[T]]
- override def split: Seq[ParSeqIterator[T]] = super.split.asInstanceOf[Seq[ParSeqIterator[T]]]
- def psplit(sizes: Int*): Seq[ParSeqIterator[T]] = takeSeq(self.psplit(sizes: _*)) { (p, n) => p.take(n) }
+ class Taken(tk: Int) extends super.Taken(tk) with SeqSplitter[T] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[T]]
+ override def split: Seq[SeqSplitter[T]] = super.split.asInstanceOf[Seq[SeqSplitter[T]]]
+ def psplit(sizes: Int*): Seq[SeqSplitter[T]] = takeSeq(self.psplit(sizes: _*)) { (p, n) => p.take(n) }
}
override private[collection] def newTaken(until: Int): Taken = new Taken(until)
- override def take(n: Int): ParSeqIterator[T] = newTaken(n)
- override def slice(from1: Int, until1: Int): ParSeqIterator[T] = newSliceInternal(newTaken(until1), from1)
+ override def take(n: Int): SeqSplitter[T] = newTaken(n)
+ override def slice(from1: Int, until1: Int): SeqSplitter[T] = newSliceInternal(newTaken(until1), from1)
- class Mapped[S](f: T => S) extends super.Mapped[S](f) with ParSeqIterator[S] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[S]]
- override def split: Seq[ParSeqIterator[S]] = super.split.asInstanceOf[Seq[ParSeqIterator[S]]]
- def psplit(sizes: Int*): Seq[ParSeqIterator[S]] = self.psplit(sizes: _*).map { _ map f }
+ class Mapped[S](f: T => S) extends super.Mapped[S](f) with SeqSplitter[S] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[S]]
+ override def split: Seq[SeqSplitter[S]] = super.split.asInstanceOf[Seq[SeqSplitter[S]]]
+ def psplit(sizes: Int*): Seq[SeqSplitter[S]] = self.psplit(sizes: _*).map { _ map f }
}
override def map[S](f: T => S) = new Mapped(f)
- class Appended[U >: T, PI <: ParSeqIterator[U]](it: PI) extends super.Appended[U, PI](it) with ParSeqIterator[U] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[U]]
- override def split: Seq[ParSeqIterator[U]] = super.split.asInstanceOf[Seq[ParSeqIterator[U]]]
- def psplit(sizes: Int*): Seq[ParSeqIterator[U]] = if (firstNonEmpty) {
+ class Appended[U >: T, PI <: SeqSplitter[U]](it: PI) extends super.Appended[U, PI](it) with SeqSplitter[U] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[U]]
+ override def split: Seq[SeqSplitter[U]] = super.split.asInstanceOf[Seq[SeqSplitter[U]]]
+ def psplit(sizes: Int*): Seq[SeqSplitter[U]] = if (firstNonEmpty) {
val selfrem = self.remaining
// split sizes
@@ -585,56 +586,56 @@ self =>
val thats = that.psplit(thatsizes: _*)
// appended last in self with first in rest if necessary
- if (appendMiddle) selfs.init ++ Seq(selfs.last.appendParSeq[U, ParSeqIterator[U]](thats.head)) ++ thats.tail
+ if (appendMiddle) selfs.init ++ Seq(selfs.last.appendParSeq[U, SeqSplitter[U]](thats.head)) ++ thats.tail
else selfs ++ thats
- } else curr.asInstanceOf[ParSeqIterator[U]].psplit(sizes: _*)
+ } else curr.asInstanceOf[SeqSplitter[U]].psplit(sizes: _*)
}
- def appendParSeq[U >: T, PI <: ParSeqIterator[U]](that: PI) = new Appended[U, PI](that)
+ def appendParSeq[U >: T, PI <: SeqSplitter[U]](that: PI) = new Appended[U, PI](that)
- class Zipped[S](ti: ParSeqIterator[S]) extends super.Zipped[S](ti) with ParSeqIterator[(T, S)] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[(T, S)]]
- override def split: Seq[ParSeqIterator[(T, S)]] = super.split.asInstanceOf[Seq[ParSeqIterator[(T, S)]]]
+ class Zipped[S](ti: SeqSplitter[S]) extends super.Zipped[S](ti) with SeqSplitter[(T, S)] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[(T, S)]]
+ override def split: Seq[SeqSplitter[(T, S)]] = super.split.asInstanceOf[Seq[SeqSplitter[(T, S)]]]
def psplit(szs: Int*) = (self.psplit(szs: _*) zip that.psplit(szs: _*)) map { p => p._1 zipParSeq p._2 }
}
- override def zipParSeq[S](that: ParSeqIterator[S]) = new Zipped(that)
+ override def zipParSeq[S](that: SeqSplitter[S]) = new Zipped(that)
- class ZippedAll[U >: T, S](ti: ParSeqIterator[S], thise: U, thate: S) extends super.ZippedAll[U, S](ti, thise, thate) with ParSeqIterator[(U, S)] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[(U, S)]]
+ class ZippedAll[U >: T, S](ti: SeqSplitter[S], thise: U, thate: S) extends super.ZippedAll[U, S](ti, thise, thate) with SeqSplitter[(U, S)] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[(U, S)]]
private def patchem = {
val selfrem = self.remaining
val thatrem = that.remaining
- val thisit = if (selfrem < thatrem) self.appendParSeq[U, ParSeqIterator[U]](repetition[U](thiselem, thatrem - selfrem).parallelIterator) else self
- val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).parallelIterator) else that
+ val thisit = if (selfrem < thatrem) self.appendParSeq[U, SeqSplitter[U]](repetition[U](thiselem, thatrem - selfrem).splitter) else self
+ val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).splitter) else that
(thisit, thatit)
}
- override def split: Seq[ParSeqIterator[(U, S)]] = {
+ override def split: Seq[SeqSplitter[(U, S)]] = {
val (thisit, thatit) = patchem
val zipped = thisit zipParSeq thatit
zipped.split
}
- def psplit(sizes: Int*): Seq[ParSeqIterator[(U, S)]] = {
+ def psplit(sizes: Int*): Seq[SeqSplitter[(U, S)]] = {
val (thisit, thatit) = patchem
val zipped = thisit zipParSeq thatit
zipped.psplit(sizes: _*)
}
}
- override def zipAllParSeq[S, U >: T, R >: S](that: ParSeqIterator[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
+ override def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
- def reverse: ParSeqIterator[T] = {
+ def reverse: SeqSplitter[T] = {
val pa = mutable.ParArray.fromTraversables(self).reverse
new pa.ParArrayIterator with pa.SCPI {
override def reverse = self
}
}
- class Patched[U >: T](from: Int, patch: ParSeqIterator[U], replaced: Int) extends ParSeqIterator[U] {
+ class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] {
var signalDelegate = self.signalDelegate
private[this] val trio = {
val pits = self.psplit(from, replaced, self.remaining - from - replaced)
- (pits(0).appendParSeq[U, ParSeqIterator[U]](patch)) appendParSeq pits(2)
+ (pits(0).appendParSeq[U, SeqSplitter[U]](patch)) appendParSeq pits(2)
}
def hasNext = trio.hasNext
def next = trio.next
@@ -644,7 +645,7 @@ self =>
def psplit(sizes: Int*) = trio.psplit(sizes: _*)
}
- def patchParSeq[U >: T](from: Int, patchElems: ParSeqIterator[U], replaced: Int) = new Patched(from, patchElems, replaced)
+ def patchParSeq[U >: T](from: Int, patchElems: SeqSplitter[U], replaced: Int) = new Patched(from, patchElems, replaced)
}
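
Editorial note (not part of the commit): the ZippedAll splitter above pads the shorter of the two sides with a repeated element before zipping, which is the same contract as zipAll on sequential collections. A minimal sketch of that padding rule, using plain sequences rather than the splitter API:

// Editorial sketch: the padding behaviour ZippedAll implements, shown on
// ordinary sequences. The shorter side is extended with the given element
// until both sides match in length, then the pair-wise zip is taken.
object ZipAllPaddingSketch {
  def main(args: Array[String]) {
    val xs = Seq(1, 2, 3, 4)
    val ys = Seq("a", "b")
    println(xs.zipAll(ys, 0, "-"))   // List((1,a), (2,b), (3,-), (4,-))
  }
}
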
diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala
index 7328f3db6c..568a7c8e0d 100644
--- a/src/library/scala/collection/parallel/Splitter.scala
+++ b/src/library/scala/collection/parallel/Splitter.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.collection.parallel
@@ -63,53 +62,3 @@ object Splitter {
}
-/** A precise splitter (or a precise split iterator) can be split into arbitrary number of splitters
- * that traverse disjoint subsets of arbitrary sizes.
- *
- * Implementors might want to override the parameterless `split` method for efficiency.
- *
- * @tparam T type of the elements this splitter traverses
- *
- * @since 2.9
- * @author Aleksandar Prokopec
- */
-trait PreciseSplitter[+T] extends Splitter[T] {
-
- /** Splits the splitter into disjunct views.
- *
- * This overloaded version of the `split` method is specific to precise splitters.
- * It returns a sequence of splitters, each iterating some subset of the
- * elements in this splitter. The sizes of the subsplitters in the partition is equal to
- * the size in the corresponding argument, as long as there are enough elements in this
- * splitter to split it that way.
- *
- * If there aren't enough elements, a zero element splitter is appended for each additional argument.
- * If there are additional elements, an additional splitter is appended at the end to compensate.
- *
- * For example, say we have a splitter `ps` with 100 elements. Invoking:
- * {{{
- * ps.split(50, 25, 25, 10, 5)
- * }}}
- * will return a sequence of five splitters, last two views being empty. On the other hand, calling:
- * {{{
- * ps.split(50, 40)
- * }}}
- * will return a sequence of three splitters, last of them containing ten elements.
- *
- * '''Note:''' this method actually invalidates the current splitter.
- *
- * Unlike the case with `split` found in splitters, views returned by this method can be empty.
- *
- * @param sizes the sizes used to split this split iterator into iterators that traverse disjunct subsets
- * @return a sequence of disjunct subsequence iterators of this parallel iterator
- */
- def psplit(sizes: Int*): Seq[PreciseSplitter[T]]
-
- def split: Seq[PreciseSplitter[T]]
-
-}
-
-
-
-
-
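
Editorial note (not part of the commit): the comment removed above describes the sized-split contract that now lives on PreciseSplitter.psplit; note its examples still write ps.split even though the sized overload is called psplit. A plain-Scala sketch of the size arithmetic it specifies; psplitSizes is a hypothetical helper, not library code:

// Editorial sketch of the psplit sizing contract: requested sizes are served
// while elements last, missing elements yield empty (zero-size) splitters,
// and any surplus goes into one extra splitter at the end.
object PsplitContractSketch {
  def psplitSizes(total: Int, sizes: Seq[Int]): Seq[Int] = {
    val (parts, used) = sizes.foldLeft((Vector.empty[Int], 0)) {
      case ((acc, taken), wanted) =>
        val got = math.min(wanted, math.max(total - taken, 0))
        (acc :+ got, taken + got)
    }
    if (used < total) parts :+ (total - used) else parts
  }
  def main(args: Array[String]) {
    println(psplitSizes(100, Seq(50, 25, 25, 10, 5)))  // Vector(50, 25, 25, 0, 0)
    println(psplitSizes(100, Seq(50, 40)))             // Vector(50, 40, 10)
  }
}
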
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 80cdd31fa1..497e0c638a 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -44,7 +44,7 @@ trait Tasks {
def leaf(result: Option[R])
/** A result that can be accessed once the task is completed. */
- @volatile var result: R
+ var result: R
/** Decides whether or not this task should be split further. */
def shouldSplitFurther: Boolean
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index 11d4a02e18..b9b7cbd69d 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -6,32 +6,20 @@
** |/ **
\* */
-
package scala.collection.parallel.immutable
-
-
-
-
-
-
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParIterableIterator
-import scala.collection.parallel.EnvironmentPassingCombiner
+import scala.collection.parallel.IterableSplitter
import scala.collection.mutable.UnrolledBuffer.Unrolled
import scala.collection.mutable.UnrolledBuffer
import scala.collection.generic.ParMapFactory
import scala.collection.generic.CanCombineFrom
import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
-import scala.collection.immutable.HashMap
-
-
+import scala.collection.immutable.{ HashMap, TrieIterator }
import annotation.unchecked.uncheckedVariance
-
-
/** Immutable parallel hash map, based on hash tries.
*
* $paralleliterableinfo
@@ -64,7 +52,7 @@ self =>
protected[this] override def newCombiner = HashMapCombiner[K, V]
- def parallelIterator: ParIterableIterator[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) with SCPI
override def seq = trie
@@ -88,9 +76,8 @@ self =>
self: SignalContextPassingIterator[ParHashMapIterator] =>
var i = 0
def dup = triter match {
- case t: HashMap.TrieIterator[_, _] =>
- val dupt = t.dupIterator.asInstanceOf[Iterator[(K, V)]]
- dupFromIterator(dupt)
+ case t: TrieIterator[_] =>
+ dupFromIterator(t.dupIterator)
case _ =>
val buff = triter.toBuffer
triter = buff.iterator
@@ -102,9 +89,9 @@ self =>
phit
}
def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match {
- case t: HashMap.TrieIterator[_, _] =>
+ case t: TrieIterator[_] =>
val previousRemaining = remaining
- val ((fst, fstlength), snd) = t.asInstanceOf[HashMap.TrieIterator[K, V]].split
+ val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
Seq(
new ParHashMapIterator(fst, fstlength) with SCPI,
@@ -167,9 +154,9 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
private[parallel] abstract class HashMapCombiner[K, V]
extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
-self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
+//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
import HashMapCombiner._
- import tasksupport._
+ import collection.parallel.tasksupport._
val emptyTrie = HashMap.empty[K, V]
def +=(elem: (K, V)) = {
@@ -337,7 +324,7 @@ self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
private[parallel] object HashMapCombiner {
- def apply[K, V] = new HashMapCombiner[K, V] with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
+ def apply[K, V] = new HashMapCombiner[K, V] {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
private[immutable] val rootbits = 5
private[immutable] val rootsize = 1 << 5
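
Editorial note (not part of the commit): the parallelIterator to splitter rename above is internal; user code keeps building and transforming immutable parallel hash maps the same way. A usage sketch, assuming the factory apply inherited from ParMapFactory:

// Editorial sketch: immutable ParHashMap is constructed through its companion
// and transformed with the usual collection operations; the splitter rename
// does not change this surface.
import scala.collection.parallel.immutable.ParHashMap

object ParHashMapSketch {
  def main(args: Array[String]) {
    val m = ParHashMap(1 -> "one", 2 -> "two", 3 -> "three")
    val lengths = m.map { case (k, v) => (k, v.length) }   // parallel map of key -> length
    println(lengths)
  }
}
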
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index e292a3ef72..e3c408e4db 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -6,19 +6,11 @@
** |/ **
\* */
-
package scala.collection.parallel.immutable
-
-
-
-
-
-
import scala.collection.parallel.ParSetLike
import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParIterableIterator
-import scala.collection.parallel.EnvironmentPassingCombiner
+import scala.collection.parallel.IterableSplitter
import scala.collection.mutable.UnrolledBuffer.Unrolled
import scala.collection.mutable.UnrolledBuffer
import scala.collection.generic.ParSetFactory
@@ -26,12 +18,7 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.GenericParTemplate
import scala.collection.generic.GenericParCompanion
import scala.collection.generic.GenericCompanion
-import scala.collection.immutable.HashSet
-
-
-
-
-
+import scala.collection.immutable.{ HashSet, TrieIterator }
/** Immutable parallel hash set, based on hash tries.
*
@@ -62,7 +49,7 @@ self =>
override def empty: ParHashSet[T] = new ParHashSet[T]
- def parallelIterator: ParIterableIterator[T] = new ParHashSetIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) with SCPI
override def seq = trie
@@ -86,9 +73,8 @@ self =>
self: SignalContextPassingIterator[ParHashSetIterator] =>
var i = 0
def dup = triter match {
- case t: HashSet.TrieIterator[_] =>
- val dupt = t.dupIterator.asInstanceOf[Iterator[T]]
- dupFromIterator(dupt)
+ case t: TrieIterator[_] =>
+ dupFromIterator(t.dupIterator)
case _ =>
val buff = triter.toBuffer
triter = buff.iterator
@@ -100,9 +86,9 @@ self =>
phit
}
def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match {
- case t: HashSet.TrieIterator[_] =>
+ case t: TrieIterator[_] =>
val previousRemaining = remaining
- val ((fst, fstlength), snd) = t.asInstanceOf[HashSet.TrieIterator[T]].split
+ val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
Seq(
new ParHashSetIterator(fst, fstlength) with SCPI,
@@ -143,9 +129,9 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[immutable] abstract class HashSetCombiner[T]
extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
-self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
+//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
import HashSetCombiner._
- import tasksupport._
+ import collection.parallel.tasksupport._
val emptyTrie = HashSet.empty[T]
def +=(elem: T) = {
@@ -227,7 +213,7 @@ self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
object HashSetCombiner {
- def apply[T] = new HashSetCombiner[T] with EnvironmentPassingCombiner[T, ParHashSet[T]] {}
+ def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {}
private[immutable] val rootbits = 5
private[immutable] val rootsize = 1 << 5
@@ -274,18 +260,3 @@ object HashSetCombiner {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index 085f5220dd..d8c42d74b0 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -15,6 +15,7 @@ import scala.collection.generic._
import scala.collection.parallel.ParIterableLike
import scala.collection.parallel.Combiner
+import scala.collection.GenIterable
/** A template trait for immutable parallel iterable collections.
@@ -29,10 +30,10 @@ import scala.collection.parallel.Combiner
* @since 2.9
*/
trait ParIterable[+T]
-extends collection.immutable.Iterable[T]
+extends collection/*.immutable*/.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], Iterable[T]]
+ with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 5db07a6a3b..a44a8c965e 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -21,6 +21,8 @@ import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
+import scala.collection.GenMapLike
+
@@ -36,7 +38,7 @@ import scala.collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, +V]
-extends Map[K, V]
+extends collection/*.immutable*/.GenMap[K, V]
with GenericParMapTemplate[K, V, ParMap]
with parallel.ParMap[K, V]
with ParIterable[(K, V)]
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 157f6746fa..198274f4e2 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -14,7 +14,7 @@ package scala.collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.ParIterableIterator
+import scala.collection.parallel.IterableSplitter
@@ -45,7 +45,7 @@ self =>
@inline final def apply(idx: Int) = range.apply(idx);
- def parallelIterator = new ParRangeIterator with SCPI
+ def splitter = new ParRangeIterator with SCPI
type SCPI = SignalContextPassingIterator[ParRangeIterator]
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index a922642022..bf3d3a5aa8 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -18,7 +18,7 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParFactory
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-
+import scala.collection.GenSeq
@@ -28,7 +28,7 @@ import scala.collection.parallel.Combiner
* @define coll mutable parallel sequence
*/
trait ParSeq[+T]
-extends collection.immutable.Seq[T]
+extends collection/*.immutable*/.GenSeq[T]
with collection.parallel.ParSeq[T]
with ParIterable[T]
with GenericParTemplate[T, ParSeq]
@@ -44,11 +44,11 @@ extends collection.immutable.Seq[T]
* @define coll mutable parallel sequence
*/
object ParSeq extends ParFactory[ParSeq] {
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T]
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T]
- def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T]
+ def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T]
- def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T]
+ def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T]
}
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index 73d27df994..a39607ba03 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -15,6 +15,7 @@ package parallel.immutable
+import scala.collection.GenSet
import scala.collection.immutable.Set
import scala.collection.generic._
import scala.collection.parallel.ParSetLike
@@ -29,7 +30,7 @@ import scala.collection.parallel.Combiner
* @define coll mutable parallel set
*/
trait ParSet[T]
-extends Set[T]
+extends collection/*.immutable*/.GenSet[T]
with GenericParTemplate[T, ParSet]
with parallel.ParSet[T]
with ParIterable[T]
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 05e057a2a4..d1cf3d58ec 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -16,8 +16,7 @@ package parallel.immutable
import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory}
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParSeqIterator
-import scala.collection.parallel.EnvironmentPassingCombiner
+import scala.collection.parallel.SeqSplitter
import mutable.ArrayBuffer
import immutable.Vector
import immutable.VectorBuilder
@@ -55,7 +54,7 @@ extends ParSeq[T]
def length = vector.length
- def parallelIterator: ParSeqIterator[T] = {
+ def splitter: SeqSplitter[T] = {
val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) with SCPI
vector.initIterator(pit)
pit
@@ -66,7 +65,7 @@ extends ParSeq[T]
class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with ParIterator {
self: SCPI =>
def remaining: Int = remainingElementCount
- def dup: ParSeqIterator[T] = (new ParVector(remainingVector)).parallelIterator
+ def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
def split: Seq[ParVectorIterator] = {
val rem = remaining
if (rem >= 2) psplit(rem / 2, rem - rem / 2)
@@ -79,7 +78,7 @@ extends ParSeq[T]
splitted += remvector.take(sz)
remvector = remvector.drop(sz)
}
- splitted.map(v => new ParVector(v).parallelIterator.asInstanceOf[ParVectorIterator])
+ splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator])
}
}
@@ -95,15 +94,15 @@ object ParVector extends ParFactory[ParVector] {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParVector[T]] =
new GenericCanCombineFrom[T]
- def newBuilder[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] with EPC[T, ParVector[T]]
+ def newBuilder[T]: Combiner[T, ParVector[T]] = newCombiner[T]
- def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] with EPC[T, ParVector[T]]
+ def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] // was: with EPC[T, ParVector[T]]
}
private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[T]] {
-self: EnvironmentPassingCombiner[T, ParVector[T]] =>
+//self: EnvironmentPassingCombiner[T, ParVector[T]] =>
var sz = 0
val vectors = new ArrayBuffer[VectorBuilder[T]] += new VectorBuilder[T]
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 87d5bfc9f9..c62459deeb 100644
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
@@ -43,7 +43,7 @@ package object immutable {
type SCPI = SignalContextPassingIterator[ParIterator]
class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends super.ParIterator {
- me: SignalContextPassingIterator[ParIterator] =>
+ me: SignalContextPassingIterator[ParIterator] =>
def remaining = until - i
def hasNext = i < until
def next = { i += 1; elem }
@@ -55,7 +55,7 @@ package object immutable {
def split = psplit(remaining / 2, remaining - remaining / 2)
}
- def parallelIterator = new ParIterator with SCPI
+ def splitter = new ParIterator with SCPI
}
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index 8f7a62aa4f..def6fa7742 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.collection.parallel.mutable
@@ -30,7 +29,7 @@ import scala.collection.parallel.Combiner
*/
trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To]
{
-self: collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
+//self: collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
val chain: ArrayBuffer[Buff]
val lastbuff = chain.last
def +=(elem: Elem) = { lastbuff += elem; this }
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 0ac009d58c..a1eb3beb0c 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -23,6 +23,7 @@ import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.CHECK_RATE
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.Builder
+import scala.collection.GenTraversableOnce
@@ -75,7 +76,7 @@ self =>
type SCPI = SignalContextPassingIterator[ParArrayIterator]
- def parallelIterator: ParArrayIterator = {
+ protected[parallel] def splitter: ParArrayIterator = {
val pit = new ParArrayIterator with SCPI
pit
}
@@ -178,7 +179,7 @@ self =>
override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op)
- def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
+ override def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
override def sum[U >: T](implicit num: Numeric[U]): U = {
var s = sum_quick(num, arr, until, i, num.zero)
@@ -409,12 +410,12 @@ self =>
}
}
- override def flatmap2combiner[S, That](f: T => TraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
+ override def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(self.repr)
while (i < until) {
val traversable = f(arr(i).asInstanceOf[T])
if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
- else cb ++= traversable
+ else cb ++= traversable.seq
i += 1
}
cb
@@ -592,7 +593,7 @@ self =>
(new ParArray[S](targarrseq)).asInstanceOf[That]
} else super.map(f)(bf)
- override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanCombineFrom[ParArray[T], U, That]): That =
+ override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanBuildFrom[ParArray[T], U, That]): That =
if (parallelismLevel > 1 && buildsArray(cbf(repr))) {
// reserve an array
val targarrseq = new ArraySeq[U](length + 1)
@@ -600,7 +601,7 @@ self =>
targetarr(0) = z
// do a parallel prefix scan
- if (length > 0) executeAndWaitResult(new CreateScanTree[U](0, size, z, op, parallelIterator) mapResult {
+ if (length > 0) executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult {
tree => executeAndWaitResult(new ScanToArray(tree, z, op, targetarr))
})
@@ -710,11 +711,10 @@ object ParArray extends ParFactory[ParArray] {
handoff(newarr)
}
- def fromTraversables[T](xss: TraversableOnce[T]*) = {
+ def fromTraversables[T](xss: GenTraversableOnce[T]*) = {
val cb = ParArrayCombiner[T]()
for (xs <- xss) {
- val it = xs.toIterator
- while (it.hasNext) cb += it.next
+ cb ++= xs.seq
}
cb.result
}
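
Editorial note (not part of the commit): the aggregate method above (now an override) takes two operators: seqop folds elements within a chunk and combop merges the per-chunk results; on a single chunk it degenerates to the foldLeft shown in the iterator. A small sketch of the contract on a ParArray:

// Editorial sketch: sum of squares with aggregate. seqop accumulates within
// a chunk, combop adds the partial sums produced by different chunks.
import scala.collection.parallel.mutable.ParArray

object AggregateSketch {
  def main(args: Array[String]) {
    val pa = ParArray(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    val sumOfSquares = pa.aggregate(0)((acc, x) => acc + x * x, _ + _)
    println(sumOfSquares)   // 385
  }
}
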
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index f2205fbb17..35c748916c 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -9,7 +9,7 @@
package scala.collection
package parallel.mutable
-import collection.parallel.ParIterableIterator
+import collection.parallel.IterableSplitter
/** Parallel flat hash table.
*
@@ -24,7 +24,7 @@ trait ParFlatHashTable[T] extends collection.mutable.FlatHashTable[T] {
override def alwaysInitSizeMap = true
abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int)
- extends ParIterableIterator[T] with SizeMapUtils {
+ extends IterableSplitter[T] with SizeMapUtils {
import collection.DebugUtils._
private var traversed = 0
@@ -42,7 +42,7 @@ trait ParFlatHashTable[T] extends collection.mutable.FlatHashTable[T] {
throw new IndexOutOfBoundsException(idx.toString)
}
- def newIterator(index: Int, until: Int, totalsize: Int): ParIterableIterator[T]
+ def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T]
def remaining = totalsize - traversed
def hasNext = traversed < totalsize
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index a3f3c33af5..37065e32fc 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -56,7 +56,7 @@ self =>
override def seq = new collection.mutable.HashMap[K, V](hashTableContents)
- def parallelIterator = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) with SCPI
+ def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) with SCPI
override def size = tableSize
@@ -68,15 +68,15 @@ self =>
else Some(e.value)
}
- override def put(key: K, value: V): Option[V] = {
+ def put(key: K, value: V): Option[V] = {
val e = findEntry(key)
if (e == null) { addEntry(new Entry(key, value)); None }
else { val v = e.value; e.value = value; Some(v) }
}
- override def update(key: K, value: V): Unit = put(key, value)
+ def update(key: K, value: V): Unit = put(key, value)
- override def remove(key: K): Option[V] = {
+ def remove(key: K): Option[V] = {
val e = removeEntry(key)
if (e ne null) Some(e.value)
else None
@@ -160,8 +160,8 @@ private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFa
extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
with collection.mutable.HashTable.HashUtils[K]
{
-self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
- import tasksupport._
+//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
+ import collection.parallel.tasksupport._
private var mask = ParHashMapCombiner.discriminantmask
private var nonmasklen = ParHashMapCombiner.nonmasklength
@@ -315,7 +315,7 @@ private[parallel] object ParHashMapCombiner {
private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
private[mutable] val nonmasklength = 32 - discriminantbits
- def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
+ def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
}
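
Editorial note (not part of the commit): put, update and remove above change from overrides of collection.mutable.Map to plain definitions, because the mutable ParMapLike no longer extends mutable.MapLike; their observable behaviour is meant to stay the same. A usage sketch, assuming the factory apply from ParMapFactory:

// Editorial sketch: put returns the previous value (None when the key was
// absent), update is the m(k) = v form, remove returns the removed value.
import scala.collection.parallel.mutable.ParHashMap

object MutableParHashMapSketch {
  def main(args: Array[String]) {
    val m = ParHashMap[String, Int]()
    println(m.put("a", 1))    // None
    m("b") = 2                // update
    println(m.put("a", 10))   // Some(1)
    println(m.remove("b"))    // Some(2)
    println(m)                // contains only a -> 10
  }
}
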
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 6d82e1b6aa..0e48995cbe 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -12,7 +12,6 @@ import collection.generic._
import collection.mutable.HashSet
import collection.mutable.FlatHashTable
import collection.parallel.Combiner
-import collection.parallel.EnvironmentPassingCombiner
import collection.mutable.UnrolledBuffer
/** A parallel hash set.
@@ -46,11 +45,11 @@ extends ParSet[T]
override def empty = new ParHashSet
- override def iterator = parallelIterator
+ override def iterator = splitter
override def size = tableSize
- override def clear() = clearTable()
+ def clear() = clearTable()
override def seq = new HashSet(hashTableContents)
@@ -68,7 +67,7 @@ extends ParSet[T]
def contains(elem: T) = containsEntry(elem)
- def parallelIterator = new ParHashSetIterator(0, table.length, size) with SCPI
+ def splitter = new ParHashSetIterator(0, table.length, size) with SCPI
type SCPI = SignalContextPassingIterator[ParHashSetIterator]
@@ -117,8 +116,8 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int)
extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
with collection.mutable.FlatHashTable.HashUtils[T] {
-self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
- import tasksupport._
+//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
+ import collection.parallel.tasksupport._
private var mask = ParHashSetCombiner.discriminantmask
private var nonmasklen = ParHashSetCombiner.nonmasklength
@@ -316,6 +315,6 @@ private[parallel] object ParHashSetCombiner {
private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
private[mutable] val nonmasklength = 32 - discriminantbits
- def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) with EnvironmentPassingCombiner[T, ParHashSet[T]]
+ def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]]
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index be1cbf1333..e5c0be36a1 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -14,7 +14,7 @@ package parallel.mutable
import collection.mutable.HashEntry
-import collection.parallel.ParIterableIterator
+import collection.parallel.IterableSplitter
@@ -28,9 +28,9 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
/** A parallel iterator returning all the entries.
*/
- abstract class EntryIterator[T, +IterRepr <: ParIterableIterator[T]]
+ abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]]
(private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
- extends ParIterableIterator[T] with SizeMapUtils {
+ extends IterableSplitter[T] with SizeMapUtils {
private val itertable = table
private var traversed = 0
scan()
@@ -78,7 +78,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
def dup = newIterator(idx, until, totalsize, es)
- def split: Seq[ParIterableIterator[T]] = if (remaining > 1) {
+ def split: Seq[IterableSplitter[T]] = if (remaining > 1) {
if (until > idx) {
// there is at least one more slot for the next iterator
// divide the rest of the table
@@ -104,7 +104,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
// otherwise, this is the last entry in the table - all what remains is the chain
// so split the rest of the chain
val arr = convertToArrayBuffer(es)
- val arrpit = new collection.parallel.BufferIterator[T](arr, 0, arr.length, signalDelegate)
+ val arrpit = new collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
arrpit.split
}
} else Seq(this.asInstanceOf[IterRepr])
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index cd0c45bd0d..700d21d0bb 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -6,14 +6,13 @@
** |/ **
\* */
-
package scala.collection.parallel.mutable
import scala.collection.generic._
-
import scala.collection.parallel.ParIterableLike
import scala.collection.parallel.Combiner
+import scala.collection.GenIterable
/** A template trait for mutable parallel iterable collections.
@@ -27,16 +26,19 @@ import scala.collection.parallel.Combiner
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait ParIterable[T] extends collection.mutable.Iterable[T]
+trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], Iterable[T]] {
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
+ //protected[this] override def newBuilder = ParIterable.newBuilder[T]
- // if `mutable.ParIterableLike` is introduced, please move these 4 methods there
+ // if `mutable.ParIterableLike` is introduced, please move these methods there
override def toIterable: ParIterable[T] = this
override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T])
+
+ def seq: collection.mutable.Iterable[T]
}
/** $factoryInfo
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index f4fa7303c4..91c2b3d2b2 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -28,17 +28,21 @@ import collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, V]
-extends collection.mutable.Map[K, V]
+extends collection/*.mutable*/.GenMap[K, V]
with collection.parallel.ParMap[K, V]
with /* mutable */ ParIterable[(K, V)]
with GenericParMapTemplate[K, V, ParMap]
with /* mutable */ ParMapLike[K, V, ParMap[K, V], collection.mutable.Map[K, V]]
{
+ protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
+
override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap
override def empty: ParMap[K, V] = new ParHashMap[K, V]
+ def seq: collection.mutable.Map[K, V]
+
}
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index 52e11c6db3..aff590d5bb 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.collection.parallel
package mutable
@@ -14,6 +13,7 @@ package mutable
import collection.generic._
import collection.mutable.Builder
+import collection.mutable.Cloneable
@@ -32,12 +32,23 @@ trait ParMapLike[K,
V,
+Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V],
+Sequential <: collection.mutable.Map[K, V] with collection.mutable.MapLike[K, V, Sequential]]
-extends collection.mutable.MapLike[K, V, Repr]
- with collection.parallel.ParMapLike[K, V, Repr, Sequential] {
+extends collection.GenMapLike[K, V, Repr]
+ with collection.parallel.ParMapLike[K, V, Repr, Sequential]
+ with Cloneable[Repr] {
// note: should not override toMap
- override def clear(): Unit = throw new UnsupportedOperationException("Must be reimplemented for parallel map implementations.")
+ def put(key: K, value: V): Option[V]
+
+ def +=(kv: (K, V)): this.type
+
+ def -=(key: K): this.type
+
+ def +[U >: V](kv: (K, U)) = this.clone().asInstanceOf[ParMap[K, U]] += kv
+
+ def -(key: K) = this.clone() -= key
+
+ def clear(): Unit
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index 26320c1bc3..a48ba48d56 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -17,7 +17,7 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParFactory
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-
+import scala.collection.GenSeq
@@ -29,19 +29,20 @@ import scala.collection.parallel.Combiner
* @define Coll mutable.ParSeq
* @define coll mutable parallel sequence
*/
-trait ParSeq[T] extends collection.mutable.Seq[T]
+trait ParSeq[T] extends collection/*.mutable*/.GenSeq[T] // was: collection.mutable.Seq[T]
with ParIterable[T]
with collection.parallel.ParSeq[T]
with GenericParTemplate[T, ParSeq]
with ParSeqLike[T, ParSeq[T], collection.mutable.Seq[T]] {
- self =>
+self =>
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
+ //protected[this] override def newBuilder = ParSeq.newBuilder[T]
def update(i: Int, elem: T): Unit
- override def toSeq: ParSeq[T] = this
+ def seq: collection.mutable.Seq[T]
- override def transform(f: T => T): this.type = throw new UnsupportedOperationException("Not supported for parallel sequences.")
+ override def toSeq: ParSeq[T] = this
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index b7f4b61d66..1d295fd5fe 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -13,7 +13,7 @@ package scala.collection.parallel.mutable
import scala.collection.generic._
import scala.collection.parallel.Combiner
-
+import scala.collection.GenSet
@@ -23,17 +23,20 @@ import scala.collection.parallel.Combiner
*
* @define Coll mutable.ParSet
* @define coll mutable parallel set
+ *
+ * @author Aleksandar Prokopec
*/
trait ParSet[T]
-extends collection.mutable.Set[T]
+extends collection/*.mutable*/.GenSet[T]
with ParIterable[T]
with collection.parallel.ParSet[T]
with GenericParTemplate[T, ParSet]
with ParSetLike[T, ParSet[T], collection.mutable.Set[T]]
{
self =>
- override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet;
+ override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
override def empty: ParSet[T] = ParHashSet()
+ def seq: collection.mutable.Set[T]
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 68f142cda7..7c9767befd 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -14,8 +14,8 @@ package parallel.mutable
import scala.collection.mutable.Set
import scala.collection.mutable.Builder
-
-
+import scala.collection.mutable.Cloneable
+import scala.collection.GenSetLike
@@ -35,16 +35,21 @@ import scala.collection.mutable.Builder
trait ParSetLike[T,
+Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T],
+Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]]
-extends mutable.SetLike[T, Repr]
+extends GenSetLike[T, Repr]
with collection.parallel.ParIterableLike[T, Repr, Sequential]
with collection.parallel.ParSetLike[T, Repr, Sequential]
-{ self =>
+ with Cloneable[Repr]
+{
+self =>
+ override def empty: Repr
- protected[this] override def newBuilder: Builder[T, Repr] = newCombiner
+ def +=(elem: T): this.type
- protected[this] override def newCombiner: parallel.Combiner[T, Repr]
+ def -=(elem: T): this.type
- override def empty: Repr
+ def +(elem: T) = this.clone() += elem
+
+ def -(elem: T) = this.clone() -= elem
// note: should not override toSet
}
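
Editorial note (not part of the commit): the + and - added above follow the usual mutable-collection pattern of building the non-destructive operator from clone() plus the in-place one. The same pattern on an ordinary mutable set, for reference:

// Editorial sketch: clone-then-mutate, the pattern ParSetLike adopts above,
// shown with a plain mutable.HashSet whose clone() copies the contents.
import scala.collection.mutable

object CloneThenMutateSketch {
  def main(args: Array[String]) {
    val s = mutable.HashSet(1, 2, 3)
    val t = s.clone() += 4    // what `s + 4` expands to in ParSetLike
    println(s)                // Set(1, 2, 3), unchanged (order may vary)
    println(t)                // Set(1, 2, 3, 4)
  }
}
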
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
new file mode 100644
index 0000000000..8290438c10
--- /dev/null
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -0,0 +1,111 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2011, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala.collection.parallel.mutable
+
+
+
+import scala.collection.generic.Sizing
+import scala.collection.mutable.ArraySeq
+import scala.collection.mutable.ArrayBuffer
+import scala.collection.parallel.TaskSupport
+//import scala.collection.parallel.EnvironmentPassingCombiner
+import scala.collection.parallel.unsupportedop
+import scala.collection.parallel.Combiner
+
+
+
+/** An array combiner that uses a chain of arraybuffers to store elements. */
+trait ResizableParArrayCombiner[T]
+extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]]
+{
+//self: EnvironmentPassingCombiner[T, ParArray[T]] =>
+ import collection.parallel.tasksupport._
+
+ override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
+
+ def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
+
+ def allocateAndCopy = if (chain.size > 1) {
+ val arrayseq = new ArraySeq[T](size)
+ val array = arrayseq.array.asInstanceOf[Array[Any]]
+
+ executeAndWaitResult(new CopyChainToArray(array, 0, size))
+
+ new ParArray(arrayseq)
+ } else { // optimisation if there is only 1 array
+ val pa = new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size))
+ pa
+ }
+
+ override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain
+
+ /* tasks */
+
+ class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyChainToArray] {
+ var result = ()
+ def leaf(prev: Option[Unit]) = if (howmany > 0) {
+ var totalleft = howmany
+ val (stbuff, stind) = findStart(offset)
+ var buffind = stbuff
+ var ind = stind
+ var arrayIndex = offset
+ while (totalleft > 0) {
+ val currbuff = chain(buffind)
+ val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind
+ val until = ind + chunksize
+
+ copyChunk(currbuff.internalArray, ind, array, arrayIndex, until)
+ arrayIndex += chunksize
+ ind += chunksize
+
+ totalleft -= chunksize
+ buffind += 1
+ ind = 0
+ }
+ }
+ private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int) {
+ Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart)
+ }
+ private def findStart(pos: Int) = {
+ var left = pos
+ var buffind = 0
+ while (left >= chain(buffind).size) {
+ left -= chain(buffind).size
+ buffind += 1
+ }
+ (buffind, left)
+ }
+ def split = {
+ val fp = howmany / 2
+ List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
+ }
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, parallelismLevel)
+ }
+
+}
+
+
+object ResizableParArrayCombiner {
+ def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = {
+ new { val chain = c } with ResizableParArrayCombiner[T] // was: with EnvironmentPassingCombiner[T, ParArray[T]]
+ }
+ def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T])
+}
+
+
+
+
+
+
+
+
+
+
+
+
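
Editorial note (not part of the commit): a combiner such as the one added above is driven by appending elements and then asking for the result. A usage sketch, assuming the trait and its companion remain public as declared in this file:

// Editorial sketch: ResizableParArrayCombiner builds a ParArray from a chain
// of array buffers; with a single buffer, result wraps the buffer directly.
import scala.collection.parallel.mutable.ResizableParArrayCombiner

object CombinerUsageSketch {
  def main(args: Array[String]) {
    val cmb = ResizableParArrayCombiner[Int]()
    for (i <- 1 to 10) cmb += i
    val pa = cmb.result       // a ParArray[Int] of the appended elements
    println(pa.length)        // 10
  }
}
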
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index 9e27f6c58c..a4dc9b4a14 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -1,3 +1,11 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2011, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
package scala.collection.parallel.mutable
@@ -10,7 +18,7 @@ import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.UnrolledBuffer
import scala.collection.mutable.UnrolledBuffer.Unrolled
import scala.collection.parallel.TaskSupport
-import scala.collection.parallel.EnvironmentPassingCombiner
+//import scala.collection.parallel.EnvironmentPassingCombiner
import scala.collection.parallel.unsupportedop
import scala.collection.parallel.Combiner
@@ -28,11 +36,11 @@ private[mutable] class DoublingUnrolledBuffer[T](implicit m: ClassManifest[T]) e
/** An array combiner that uses doubling unrolled buffers to store elements. */
trait UnrolledParArrayCombiner[T]
extends Combiner[T, ParArray[T]] {
-self: EnvironmentPassingCombiner[T, ParArray[T]] =>
+//self: EnvironmentPassingCombiner[T, ParArray[T]] =>
// because size is doubling, random access is O(logn)!
val buff = new DoublingUnrolledBuffer[Any]
- import tasksupport._
+ import collection.parallel.tasksupport._
def +=(elem: T) = {
buff += elem
@@ -109,96 +117,6 @@ self: EnvironmentPassingCombiner[T, ParArray[T]] =>
object UnrolledParArrayCombiner {
- def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] with EnvironmentPassingCombiner[T, ParArray[T]]
-}
-
-
-/** An array combiner that uses a chain of arraybuffers to store elements. */
-trait ResizableParArrayCombiner[T]
-extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]]
-{
-self: EnvironmentPassingCombiner[T, ParArray[T]] =>
- import tasksupport._
-
- override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
-
- def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
-
- def allocateAndCopy = if (chain.size > 1) {
- val arrayseq = new ArraySeq[T](size)
- val array = arrayseq.array.asInstanceOf[Array[Any]]
-
- executeAndWaitResult(new CopyChainToArray(array, 0, size))
-
- new ParArray(arrayseq)
- } else { // optimisation if there is only 1 array
- val pa = new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size))
- pa
- }
-
- override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain
-
- /* tasks */
-
- class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyChainToArray] {
- var result = ()
- def leaf(prev: Option[Unit]) = if (howmany > 0) {
- var totalleft = howmany
- val (stbuff, stind) = findStart(offset)
- var buffind = stbuff
- var ind = stind
- var arrayIndex = offset
- while (totalleft > 0) {
- val currbuff = chain(buffind)
- val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind
- val until = ind + chunksize
-
- copyChunk(currbuff.internalArray, ind, array, arrayIndex, until)
- arrayIndex += chunksize
- ind += chunksize
-
- totalleft -= chunksize
- buffind += 1
- ind = 0
- }
- }
- private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int) {
- Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart)
- }
- private def findStart(pos: Int) = {
- var left = pos
- var buffind = 0
- while (left >= chain(buffind).size) {
- left -= chain(buffind).size
- buffind += 1
- }
- (buffind, left)
- }
- def split = {
- val fp = howmany / 2
- List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
- }
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, parallelismLevel)
- }
-
+ def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]]
}
-
-object ResizableParArrayCombiner {
- def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = {
- new { val chain = c } with ResizableParArrayCombiner[T] with EnvironmentPassingCombiner[T, ParArray[T]]
- }
- def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T])
-}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 6efff70606..decae62dd2 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -88,7 +88,7 @@ package object parallel {
def toParArray: ParArray[T]
}
- implicit def traversable2ops[T](t: TraversableOnce[T]) = new TraversableOps[T] {
+ implicit def traversable2ops[T](t: collection.GenTraversableOnce[T]) = new TraversableOps[T] {
def isParallel = t.isInstanceOf[Parallel]
def isParIterable = t.isInstanceOf[ParIterable[_]]
def asParIterable = t.asInstanceOf[ParIterable[T]]
@@ -128,9 +128,9 @@ package object parallel {
/** A helper iterator for iterating very small array buffers.
* Automatically forwards the signal delegate when splitting.
*/
- private[parallel] class BufferIterator[T]
+ private[parallel] class BufferSplitter[T]
(private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, var signalDelegate: collection.generic.Signalling)
- extends ParIterableIterator[T] {
+ extends IterableSplitter[T] {
def hasNext = index < until
def next = {
val r = buffer(index)
@@ -138,12 +138,12 @@ package object parallel {
r
}
def remaining = until - index
- def dup = new BufferIterator(buffer, index, until, signalDelegate)
- def split: Seq[ParIterableIterator[T]] = if (remaining > 1) {
+ def dup = new BufferSplitter(buffer, index, until, signalDelegate)
+ def split: Seq[IterableSplitter[T]] = if (remaining > 1) {
val divsz = (until - index) / 2
Seq(
- new BufferIterator(buffer, index, index + divsz, signalDelegate),
- new BufferIterator(buffer, index + divsz, until, signalDelegate)
+ new BufferSplitter(buffer, index, index + divsz, signalDelegate),
+ new BufferSplitter(buffer, index + divsz, until, signalDelegate)
)
} else Seq(this)
private[parallel] override def debugInformation = {
@@ -186,7 +186,7 @@ package object parallel {
private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]]
(private val bucketnumber: Int)
extends Combiner[Elem, To] {
- self: EnvironmentPassingCombiner[Elem, To] =>
+ //self: EnvironmentPassingCombiner[Elem, To] =>
protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber)
protected var sz: Int = 0
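
Editorial note (not part of the commit): widening traversable2ops above to GenTraversableOnce is what keeps extension methods such as toParArray available on both sequential and general traversables. A usage sketch:

// Editorial sketch: toParArray comes from the traversable2ops implicit in the
// parallel package object; importing the package brings it into scope.
import scala.collection.parallel._

object ToParArraySketch {
  def main(args: Array[String]) {
    val pa = List(1, 2, 3, 4).toParArray
    println(pa.map(_ + 1))    // ParArray(2, 3, 4, 5)
  }
}
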
diff --git a/src/library/scala/concurrent/MailBox.scala b/src/library/scala/concurrent/MailBox.scala
index d9a8746ba3..b00ab79919 100644
--- a/src/library/scala/concurrent/MailBox.scala
+++ b/src/library/scala/concurrent/MailBox.scala
@@ -16,7 +16,7 @@ package scala.concurrent
* @version 1.0, 12/03/2003
*/
//class MailBox with Monitor with LinkedListQueueCreator {
-@deprecated("use actors instead")
+@deprecated("use actors instead", "2.8.0")
class MailBox extends AnyRef with ListQueueCreator {
type Message = AnyRef
@@ -108,7 +108,7 @@ class MailBox extends AnyRef with ListQueueCreator {
/**
* Module for dealing with queues.
*/
-@deprecated("use actors instead")
+@deprecated("use actors instead", "2.8.0")
trait QueueModule[A] {
/** Type of queues. */
type T
@@ -121,7 +121,7 @@ trait QueueModule[A] {
}
/** Inefficient but simple queue module creator. */
-@deprecated("use actors instead")
+@deprecated("use actors instead", "2.8.0")
trait ListQueueCreator {
def queueCreate[A]: QueueModule[A] = new QueueModule[A] {
type T = List[A]
@@ -143,7 +143,7 @@ trait ListQueueCreator {
}
/** Efficient queue module creator based on linked lists. */
-@deprecated("use actors instead")
+@deprecated("use actors instead", "2.8.0")
trait LinkedListQueueCreator {
import scala.collection.mutable.LinkedList
def queueCreate[A >: Null <: AnyRef]: QueueModule[A] = new QueueModule[A] {
diff --git a/src/library/scala/concurrent/TIMEOUT.scala b/src/library/scala/concurrent/TIMEOUT.scala
index 512879760b..54d7db853c 100644
--- a/src/library/scala/concurrent/TIMEOUT.scala
+++ b/src/library/scala/concurrent/TIMEOUT.scala
@@ -17,5 +17,5 @@ package scala.concurrent
* @author Martin Odersky
* @version 1.0, 10/03/2003
*/
-@deprecated("use actors instead")
+@deprecated("use actors instead", "2.8.0")
case object TIMEOUT
diff --git a/src/library/scala/concurrent/ops.scala b/src/library/scala/concurrent/ops.scala
index 5542472ce1..9498a62f32 100644
--- a/src/library/scala/concurrent/ops.scala
+++ b/src/library/scala/concurrent/ops.scala
@@ -68,7 +68,7 @@ object ops
* @param end ...
* @param p ...
*/
- @deprecated("use `collection.parallel.ParIterable.foreach' instead")
+ @deprecated("use `collection.parallel.ParIterable.foreach' instead", "2.9.0")
def replicate(start: Int, end: Int)(p: Int => Unit)(implicit runner: TaskRunner = defaultRunner) {
if (start == end)
()
diff --git a/src/library/scala/concurrent/pilib.scala b/src/library/scala/concurrent/pilib.scala
index a81df2d622..cad8aecf86 100644
--- a/src/library/scala/concurrent/pilib.scala
+++ b/src/library/scala/concurrent/pilib.scala
@@ -29,7 +29,7 @@ package scala.concurrent
* @author Vincent Cremet, Martin Odersky
* @version 1.0
*/
-@deprecated("use actors instead")
+@deprecated("use actors instead", "2.8.0")
object pilib {
import TaskRunners.threadRunner
diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala
index 0f5ec16af0..f64e34a5ac 100644
--- a/src/library/scala/deprecated.scala
+++ b/src/library/scala/deprecated.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala
import annotation.target._
@@ -19,6 +17,4 @@ import annotation.target._
* @since 2.3
*/
@getter @setter @beanGetter @beanSetter
-class deprecated(message: String) extends annotation.StaticAnnotation {
- def this() = this("")
-}
+class deprecated(message: String = "", since: String = "") extends annotation.StaticAnnotation
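The rewritten annotation above is what drives the bulk of this commit: every @deprecated usage now supplies a `since` version as its second argument. A small self-contained example of the new form (DeprecationDemo is an illustrative name, not part of the patch):

    object DeprecationDemo {
      @deprecated("use bar instead", "2.9.0")
      def foo(): Int = bar()

      def bar(): Int = 42

      def main(args: Array[String]): Unit =
        println(foo()) // compiles, but scalac reports a deprecation warning at this call site
    }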
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index 3cc7a481f3..5b6accfea3 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -66,9 +66,12 @@ abstract class Position {
object Position extends Position {
/** The undefined position */
- @deprecated("This will be removed") final val NOPOS = 0
+ @deprecated("This will be removed", "2.9.0")
+ final val NOPOS = 0
+
/** The first position in a source file */
- @deprecated("This will be removed") final val FIRSTPOS = encode(1, 1)
+ @deprecated("This will be removed", "2.9.0")
+ final val FIRSTPOS = encode(1, 1)
def checkInput(line: Int, column: Int) {
if (line < 0)
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index c81f2bfed0..103be17228 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -196,7 +196,7 @@ abstract class Source extends Iterator[Char] {
* @return the specified line.
*
*/
- @deprecated("Use a collections method such as getLines().toIndexedSeq for random access.")
+ @deprecated("Use a collections method such as getLines().toIndexedSeq for random access.", "2.8.0")
def getLine(line: Int): String = lineNum(line)
private def lineNum(line: Int): String = getLines() drop (line - 1) take 1 mkString
diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala
index 389d4bd719..72a1dafc32 100644
--- a/src/library/scala/io/UTF8Codec.scala
+++ b/src/library/scala/io/UTF8Codec.scala
@@ -30,35 +30,35 @@ object UTF8Codec
// Some useful locations:
// http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
- @deprecated("""Use new String(Array(ch), 0, 1).getBytes("UTF-8") instead""")
+ @deprecated("""Use new String(Array(ch), 0, 1).getBytes("UTF-8") instead""", "2.8.0")
def encode(ch: Int): Array[Byte] =
if ((Character getType ch) == Character.SURROGATE.toInt) UNI_REPLACEMENT_BYTES
else try new String(Array(ch), 0, 1) getBytes "UTF-8" catch {
case _: IllegalArgumentException => UNI_REPLACEMENT_BYTES
}
- @deprecated("Use Codec.toUTF8 instead")
+ @deprecated("Use Codec.toUTF8 instead", "2.8.0")
def encode(src: Array[Char], from: Int, dst: Array[Byte], to: Int, len: Int): Int = {
val bytes = Codec toUTF8 src.slice(from, from + len)
Array.copy(bytes, 0, dst, to, bytes.length)
bytes.length
}
- @deprecated("Use Codec.toUTF8 instead")
+ @deprecated("Use Codec.toUTF8 instead", "2.8.0")
def encode(s: String, dst: Array[Byte], to: Int): Int =
encode(s.toArray, 0, dst, to, s.length)
- @deprecated("Use Codec.toUTF8 instead")
+ @deprecated("Use Codec.toUTF8 instead", "2.8.0")
def encode(s: String): Array[Byte] = Codec toUTF8 s
- @deprecated("Use Codec.fromUTF8 instead")
+ @deprecated("Use Codec.fromUTF8 instead", "2.8.0")
def decode(src: Array[Byte], from: Int, dst: Array[Char], to: Int, len: Int): Int = {
val chars = Codec fromUTF8 src.slice(from, from + len)
Array.copy(chars, 0, dst, to, chars.length)
chars.length
}
- @deprecated("Use Codec.fromUTF8 instead")
+ @deprecated("Use Codec.fromUTF8 instead", "2.8.0")
def decode(src: Array[Byte], from: Int, len: Int): String =
Codec fromUTF8 src.slice(from, from + len) mkString
}
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 684769bef1..16fdf0897e 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -25,10 +25,10 @@ object BigDecimal {
private val maxCached = 512
val defaultMathContext = MathContext.DECIMAL128
- @deprecated("Use Long.MinValue")
+ @deprecated("Use Long.MinValue", "2.9.0")
val MinLong = new BigDecimal(BigDec valueOf Long.MinValue, defaultMathContext)
- @deprecated("Use Long.MaxValue")
+ @deprecated("Use Long.MaxValue", "2.9.0")
val MaxLong = new BigDecimal(BigDec valueOf Long.MaxValue, defaultMathContext)
 /** Cache only for defaultMathContext using BigDecimals in a small range. */
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 71d6df9e6e..caffbe6133 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -23,10 +23,10 @@ object BigInt {
private val maxCached = 1024
private val cache = new Array[BigInt](maxCached - minCached + 1)
- @deprecated("Use Long.MinValue")
+ @deprecated("Use Long.MinValue", "2.9.0")
val MinLong = BigInt(Long.MinValue)
- @deprecated("Use Long.MaxValue")
+ @deprecated("Use Long.MaxValue", "2.9.0")
val MaxLong = BigInt(Long.MaxValue)
/** Constructs a <code>BigInt</code> whose value is equal to that of the
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 23bd6bb6e7..0e966acd6b 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -28,7 +28,7 @@ package object scala {
type NumberFormatException = java.lang.NumberFormatException
type AbstractMethodError = java.lang.AbstractMethodError
- @deprecated("instead of `@serializable class C`, use `class C extends Serializable`")
+ @deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
type serializable = annotation.serializable
type TraversableOnce[+A] = scala.collection.TraversableOnce[A]
@@ -75,7 +75,7 @@ package object scala {
val $scope = scala.xml.TopScope
- @deprecated("Use Thread.currentThread instead")
+ @deprecated("Use Thread.currentThread instead", "2.9.0")
def currentThread = java.lang.Thread.currentThread()
// Numeric types which were moved into scala.math.*
@@ -121,67 +121,77 @@ package object scala {
type unchecked = annotation.unchecked.unchecked
type volatile = annotation.volatile
*/
- @deprecated("Use Tuple1(x) to create a 1-tuple.")
+ @deprecated("Use Tuple1(x) to create a 1-tuple.", "2.8.0")
def Tuple[A1](x1: A1) = Tuple1(x1)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2](x1: A1, x2: A2) = Tuple2(x1, x2)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3](x1: A1, x2: A2, x3: A3) = Tuple3(x1, x2, x3)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4](x1: A1, x2: A2, x3: A3, x4: A4) = Tuple4(x1, x2, x3, x4)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5) = Tuple5(x1, x2, x3, x4, x5)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6) = Tuple6(x1, x2, x3, x4, x5, x6)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7) = Tuple7(x1, x2, x3, x4, x5, x6, x7)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8) = Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9) = Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10) = Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11) = Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12) = Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13) = Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14) = Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15) = Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16) = Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17) = Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18) = Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19) = Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20) = Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20, x21: A21) = Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20, x21: A21, x22: A22) = Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)
- @deprecated("use java.lang.Integer instead")
+ @deprecated("use java.lang.Integer instead", "2.6.0")
type Integer = java.lang.Integer
- @deprecated("use java.lang.Character instead")
+ @deprecated("use java.lang.Character instead", "2.6.0")
type Character = java.lang.Character
- @deprecated("use Iterable instead") type Collection[+A] = Iterable[A]
- @deprecated("use Iterable instead") val Collection = Iterable
-
- @deprecated("use Seq instead") type Sequence[+A] = scala.collection.Seq[A]
- @deprecated("use Seq instead") val Sequence = scala.collection.Seq
-
- @deprecated("use IndexedSeq instead") type RandomAccessSeq[+A] = scala.collection.IndexedSeq[A]
- @deprecated("use IndexedSeq instead") val RandomAccessSeq = scala.collection.IndexedSeq
-
- @deprecated("use scala.annotation.Annotation instead") type Annotation = scala.annotation.Annotation
- @deprecated("use scala.annotation.ClassfileAnnotation instead") type ClassfileAnnotation = scala.annotation.ClassfileAnnotation
- @deprecated("use scala.annotation.StaticAnnotation instead") type StaticAnnotation = scala.annotation.StaticAnnotation
- @deprecated("use scala.annotation.TypeConstraint instead") type TypeConstraint = scala.annotation.TypeConstraint
+ @deprecated("use Iterable instead", "2.8.0")
+ type Collection[+A] = Iterable[A]
+ @deprecated("use Iterable instead", "2.8.0")
+ val Collection = Iterable
+
+ @deprecated("use Seq instead", "2.8.0")
+ type Sequence[+A] = scala.collection.Seq[A]
+ @deprecated("use Seq instead", "2.8.0")
+ val Sequence = scala.collection.Seq
+
+ @deprecated("use IndexedSeq instead", "2.8.0")
+ type RandomAccessSeq[+A] = scala.collection.IndexedSeq[A]
+ @deprecated("use IndexedSeq instead", "2.8.0")
+ val RandomAccessSeq = scala.collection.IndexedSeq
+
+ @deprecated("use scala.annotation.Annotation instead", "2.9.0")
+ type Annotation = scala.annotation.Annotation
+ @deprecated("use scala.annotation.ClassfileAnnotation instead", "2.9.0")
+ type ClassfileAnnotation = scala.annotation.ClassfileAnnotation
+ @deprecated("use scala.annotation.StaticAnnotation instead", "2.9.0")
+ type StaticAnnotation = scala.annotation.StaticAnnotation
+ @deprecated("use scala.annotation.TypeConstraint instead", "2.9.0")
+ type TypeConstraint = scala.annotation.TypeConstraint
}
diff --git a/src/library/scala/reflect/generic/ByteCodecs.scala b/src/library/scala/reflect/generic/ByteCodecs.scala
index 2fa314782c..ae15e2941b 100644
--- a/src/library/scala/reflect/generic/ByteCodecs.scala
+++ b/src/library/scala/reflect/generic/ByteCodecs.scala
@@ -119,7 +119,7 @@ object ByteCodecs {
dst
}
- @deprecated("use 2-argument version instead")
+ @deprecated("use 2-argument version instead", "2.8.0")
def decode7to8(src: Array[Byte], srclen: Int, dstlen: Int) { decode7to8(src, srclen) }
def decode7to8(src: Array[Byte], srclen: Int): Int = {
@@ -190,7 +190,7 @@ object ByteCodecs {
def encode(xs: Array[Byte]): Array[Byte] = avoidZero(encode8to7(xs))
- @deprecated("use 1-argument version instead")
+ @deprecated("use 1-argument version instead", "2.8.0")
def decode(xs: Array[Byte], dstlen: Int) { decode(xs) }
/**
diff --git a/src/library/scala/reflect/generic/HasFlags.scala b/src/library/scala/reflect/generic/HasFlags.scala
index 2f635fbd79..ca8e2ede86 100644
--- a/src/library/scala/reflect/generic/HasFlags.scala
+++ b/src/library/scala/reflect/generic/HasFlags.scala
@@ -211,7 +211,8 @@ trait HasFlags {
// Keeping some potentially ambiguous names around so as not to break
// the rest of the world
- @deprecated("") def isAbstract = hasFlag(ABSTRACT)
+ @deprecated("", "2.9.0")
+ def isAbstract = hasFlag(ABSTRACT)
// Problematic:
// ABSTRACT and DEFERRED too easy to confuse, and
// ABSTRACT + OVERRIDE ==> ABSOVERRIDE adds to it.
diff --git a/src/library/scala/runtime/AnyValCompanion.scala b/src/library/scala/runtime/AnyValCompanion.scala
index 5f6f581f50..fdb0e8f638 100644
--- a/src/library/scala/runtime/AnyValCompanion.scala
+++ b/src/library/scala/runtime/AnyValCompanion.scala
@@ -10,5 +10,5 @@ package scala.runtime
/** See scala.AnyValCompanion.
*/
-@deprecated("Use scala.AnyValCompanion instead")
+@deprecated("Use scala.AnyValCompanion instead", "2.8.0")
private[scala] trait AnyValCompanion extends scala.AnyValCompanion { } \ No newline at end of file
diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala
index b9f536de9a..358faf47f5 100644
--- a/src/library/scala/runtime/RichChar.scala
+++ b/src/library/scala/runtime/RichChar.scala
@@ -46,13 +46,13 @@ final class RichChar(val self: Char) extends IntegralProxy[Char] {
// public static boolean isJavaIdentifierStart(char ch)
// public static boolean isJavaIdentifierPart(char ch)
- @deprecated("Use ch.toLower instead")
+ @deprecated("Use ch.toLower instead", "2.8.0")
def toLowerCase: Char = toLower
- @deprecated("Use ch.toUpper instead")
+ @deprecated("Use ch.toUpper instead", "2.8.0")
def toUpperCase: Char = toUpper
- @deprecated("Use ch.isLower instead")
+ @deprecated("Use ch.isLower instead", "2.8.0")
def isLowerCase: Boolean = isLower
- @deprecated("Use ch.isUpper instead")
+ @deprecated("Use ch.isUpper instead", "2.8.0")
def isUpperCase: Boolean = isUpper
}
diff --git a/src/library/scala/runtime/package.scala b/src/library/scala/runtime/package.scala
index 83947cce9f..9c87baf6a7 100644
--- a/src/library/scala/runtime/package.scala
+++ b/src/library/scala/runtime/package.scala
@@ -1,13 +1,13 @@
package scala
package object runtime {
- @deprecated("Use `scala.Unit` instead.") val Unit = scala.Unit
- @deprecated("Use `scala.Boolean` instead.") val Boolean = scala.Boolean
- @deprecated("Use `scala.Byte` instead.") val Byte = scala.Byte
- @deprecated("Use `scala.Short` instead.") val Short = scala.Short
- @deprecated("Use `scala.Char` instead.") val Char = scala.Char
- @deprecated("Use `scala.Int` instead.") val Int = scala.Int
- @deprecated("Use `scala.Long` instead.") val Long = scala.Long
- @deprecated("Use `scala.Float` instead.") val Float = scala.Float
- @deprecated("Use `scala.Double` instead.") val Double = scala.Double
+ @deprecated("Use `scala.Unit` instead.", "2.9.0") val Unit = scala.Unit
+ @deprecated("Use `scala.Boolean` instead.", "2.9.0") val Boolean = scala.Boolean
+ @deprecated("Use `scala.Byte` instead.", "2.9.0") val Byte = scala.Byte
+ @deprecated("Use `scala.Short` instead.", "2.9.0") val Short = scala.Short
+ @deprecated("Use `scala.Char` instead.", "2.9.0") val Char = scala.Char
+ @deprecated("Use `scala.Int` instead.", "2.9.0") val Int = scala.Int
+ @deprecated("Use `scala.Long` instead.", "2.9.0") val Long = scala.Long
+ @deprecated("Use `scala.Float` instead.", "2.9.0") val Float = scala.Float
+ @deprecated("Use `scala.Double` instead.", "2.9.0") val Double = scala.Double
}
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index 84a176b26d..0e34e7be1b 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -81,12 +81,6 @@ trait ProcessBuilder extends Source with Sink {
* redirections (implemented as pipes) from masking useful process error codes.
*/
def hasExitValue: Boolean
-
- /** Constructs a new builder which runs this command with all input/output threads marked
- * as daemon threads. This allows the creation of a long running process while still
- * allowing the JVM to exit normally.
- */
- def daemonized(): ProcessBuilder
}
object ProcessBuilder extends ProcessBuilderImpl {
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 4f6d5211f3..58f06e1039 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -81,7 +81,7 @@ private[process] trait ProcessBuilderImpl {
override def canPipeTo = true
}
- private[process] abstract class AbstractBuilder extends ProcessBuilder with Sink with Source {
+ private[scala] abstract class AbstractBuilder extends ProcessBuilder with Sink with Source {
protected def toSource = this
protected def toSink = this
@@ -114,6 +114,13 @@ private[process] trait ProcessBuilderImpl {
def !< = run(true).exitValue()
def !<(log: ProcessLogger) = runBuffered(log, true)
+ /** Constructs a new builder which runs this command with all input/output threads marked
+ * as daemon threads. This allows the creation of a long running process while still
+ * allowing the JVM to exit normally.
+ *
+ * Note: not in the public API because it's not fully baked, but I need the capability
+ * for fsc.
+ */
def daemonized(): ProcessBuilder = new DaemonBuilder(this)
private[this] def slurp(log: Option[ProcessLogger], withIn: Boolean): String = {
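The comment added above explains why daemonized() exists: I/O pump threads marked as daemons no longer keep the JVM alive. A minimal standalone sketch of that JVM behaviour, deliberately not touching the non-public DaemonBuilder:

    object DaemonThreadSketch {
      def main(args: Array[String]): Unit = {
        val pump = new Thread(new Runnable {
          // stands in for a thread that forwards a child process's output
          def run(): Unit = while (true) Thread.sleep(1000)
        })
        pump.setDaemon(true) // a daemon thread does not prevent JVM shutdown
        pump.start()
        println("main returns; the JVM exits even though the daemon thread is still running")
      }
    }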
diff --git a/src/library/scala/testing/SUnit.scala b/src/library/scala/testing/SUnit.scala
index 6a82e6d6e0..9720015d94 100644
--- a/src/library/scala/testing/SUnit.scala
+++ b/src/library/scala/testing/SUnit.scala
@@ -47,7 +47,8 @@ import xml.{ Node, NodeSeq }
* @author Burak Emir
*/
@deprecated("SUnit will be removed in 2.8.0. There are several free and sophisticated testing\n"+
- "frameworks for Scala available, examples are \"ScalaTest\", \"ScalaCheck\" or \"Specs\".")
+ """frameworks for Scala available, examples are "ScalaTest", "ScalaCheck" or "Specs".""",
+ "2.7.2")
object SUnit {
/** <p>
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index b6efde6c89..f1f6e08254 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -176,10 +176,10 @@ object MurmurHash {
* where the order of appearance of elements does not matter.
* This is useful for hashing sets, for example.
*/
- def symmetricHash[T](xs: TraversableOnce[T], seed: Int) = {
+ def symmetricHash[T](xs: collection.TraversableOnce[T], seed: Int) = {
var a,b,n = 0
var c = 1
- xs.foreach(i => {
+ xs.seq.foreach(i => {
val h = i.##
a += h
b ^= h
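symmetricHash can call xs.seq.foreach in whatever order the collection yields elements because its accumulators are commutative. The sketch below reproduces just the visible a/b part of that argument; the real method also maintains further state not shown in the hunk:

    object SymmetricHashSketch {
      // sum and xor are commutative and associative, so any traversal order of
      // the same elements produces the same (a, b) pair
      def accumulate(xs: Iterable[Int]): (Int, Int) = {
        var a, b = 0
        xs.foreach { i => val h = i.##; a += h; b ^= h }
        (a, b)
      }

      def main(args: Array[String]): Unit = {
        val xs = List(3, 1, 4, 1, 5)
        assert(accumulate(xs) == accumulate(xs.reverse))
        println(accumulate(xs))
      }
    }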
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index e4eb01108d..f86df0ee96 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -17,8 +17,7 @@ object Properties extends PropertiesTrait {
protected def pickJarBasedOn = classOf[ScalaObject]
}
-private[scala] trait PropertiesTrait
-{
+private[scala] trait PropertiesTrait {
protected def propCategory: String // specializes the remainder of the values
protected def pickJarBasedOn: Class[_] // props file comes from jar containing this
@@ -56,8 +55,40 @@ private[scala] trait PropertiesTrait
def envOrNone(name: String) = Option(System getenv name)
// for values based on propFilename
- def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
- def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
+ def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
+ def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
+ def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name))
+
+ /** The numeric portion of the runtime scala version, if this is a final
+ * release. If for instance the versionString says "version 2.9.0.final",
+ * this would return Some("2.9.0").
+ *
+ * @return Some(version) if this is a final release build, None if
+ * it is an RC, Beta, etc. or was built from source, or if the version
+ * cannot be read.
+ */
+ val releaseVersion = scalaPropOrNone("version.number") flatMap { s =>
+ val segments = s split '.'
+ if (segments.size == 4 && segments.last == "final") Some(segments take 3 mkString ".") else None
+ }
+
+ /** The development scala version, if this is not a final release.
+ * The precise contents are not guaranteed, but it aims to provide a
+ * unique repository identifier (currently the svn revision) in the
+ * fourth dotted segment if the running version was built from source.
+ *
+ * @return Some(version) if this is a non-final version, None if this
+ * is a final release or the version cannot be read.
+ */
+ val developmentVersion = scalaPropOrNone("version.number") flatMap { s =>
+ val segments = s split '.'
+ if (segments.isEmpty || segments.last == "final")
+ None
+ else if (segments.last startsWith "r")
+ Some(s takeWhile (ch => ch != '-')) // Cutting e.g. 2.10.0.r24774-b20110417125606 to 2.10.0.r24774
+ else
+ Some(s)
+ }
/** The version number of the jar this was loaded from plus "version " prefix,
* or "version (unknown)" if it cannot be determined.
diff --git a/src/library/scala/util/parsing/json/JSON.scala b/src/library/scala/util/parsing/json/JSON.scala
index de2ae03a40..036964680e 100644
--- a/src/library/scala/util/parsing/json/JSON.scala
+++ b/src/library/scala/util/parsing/json/JSON.scala
@@ -41,9 +41,8 @@ object JSON extends Parser {
*
* @param input the given JSON string.
 * @return an optional list of elements.
- *
- * @deprecated Use parseFull or parseRaw as needed.
*/
+ @deprecated("Use parseFull or parseRaw as needed.", "2.8.0")
def parse(input: String): Option[List[Any]] = parseRaw(input).map(unRaw).flatMap({
case l : List[_] => Some(l)
case _ => None
diff --git a/src/library/scala/util/parsing/syntax/package.scala b/src/library/scala/util/parsing/syntax/package.scala
index 79343d5f80..547136c21d 100644
--- a/src/library/scala/util/parsing/syntax/package.scala
+++ b/src/library/scala/util/parsing/syntax/package.scala
@@ -14,6 +14,8 @@ import scala.util.parsing.combinator.token
* be done, but it doesn't (yet) so it isn't.
*/
package object syntax {
- @deprecated("Moved to scala.util.parsing.combinator.token") type Tokens = token.Tokens
- @deprecated("Moved to scala.util.parsing.combinator.token") type StdTokens = token.StdTokens
+ @deprecated("Moved to scala.util.parsing.combinator.token", "2.8.0")
+ type Tokens = token.Tokens
+ @deprecated("Moved to scala.util.parsing.combinator.token", "2.8.0")
+ type StdTokens = token.StdTokens
}
diff --git a/src/library/scala/xml/HasKeyValue.scala b/src/library/scala/xml/HasKeyValue.scala
index 0947448ff7..29c3f584c4 100644
--- a/src/library/scala/xml/HasKeyValue.scala
+++ b/src/library/scala/xml/HasKeyValue.scala
@@ -19,7 +19,7 @@ package scala.xml
*
* @author Burak Emir
*/
-@deprecated("Use UnprefixedAttribute's extractor")
+@deprecated("Use UnprefixedAttribute's extractor", "2.8.0")
class HasKeyValue(key: String) {
def unapplySeq(x: MetaData): Option[Seq[Node]] = x.get(key)
}
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
index 806a746257..04dedfdf45 100644
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -51,11 +51,11 @@ object XML extends XMLLoader[Elem]
def withSAXParser(p: SAXParser): XMLLoader[Elem] =
new XMLLoader[Elem] { override val parser: SAXParser = p }
- @deprecated("Use save() instead")
+ @deprecated("Use save() instead", "2.8.0")
final def saveFull(filename: String, node: Node, xmlDecl: Boolean, doctype: dtd.DocType): Unit =
save(filename, node, encoding, xmlDecl, doctype)
- @deprecated("Use save() instead")
+ @deprecated("Use save() instead", "2.8.0")
final def saveFull(filename: String, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType): Unit =
save(filename, node, enc, xmlDecl, doctype)
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java b/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
index d2adf3750a..59bbeee3a4 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
@@ -45,13 +45,11 @@ public abstract class Assembly extends CustomAttributeProvider {
// dir = dir.getCanonicalFile();
// } catch (java.io.IOException e) {}
- if (name.endsWith(".exe") || name.endsWith(".EXE") ||
- name.endsWith(".dll") || name.endsWith(".DLL"))
- {
+ if (name.toUpperCase().endsWith(".EXE") || name.toUpperCase().endsWith(".DLL")) {
file = new File(dir, name);
pefile = getPEFile(file);
name = name.substring(0, name.length() - 4);
- }
+ }
File adir = pefile == null ? new File(dir, name) : null;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
index 481d5f2116..3eb22b9985 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
@@ -95,7 +95,7 @@ public class PEFile {
/** Ecma 335, 25.2.1 MS-DOS header:
*
* "The PE format starts with an MS-DOS stub of exactly the following 128 bytes to
- * be placed at the front of the module."
+ * be placed at the front of the module."
*
* We are only checking for MZ (Mark Zbikowski)
*/
@@ -107,13 +107,13 @@ public class PEFile {
/** Ecma 335, 25.2.1 MS-DOS header:
*
* "At offset 0x3c in the DOS header is a 4-byte unsigned integer offset, lfanew,
- * to the PE signature (shall be "PE\0\0"), immediately followed by the PE file header.
+ * to the PE signature (shall be "PE\0\0"), immediately followed by the PE file header."
*/
seek(0x3c);
PE_SIGNATURE_OFFSET = readInt();
seek(PE_SIGNATURE_OFFSET);
-
+ // start of PE signature (a signature that is just 4 bytes long)
fileFormatCheck(readByte() != 0x50, "Invalid PE file format: " + filename); // 'P'
fileFormatCheck(readByte() != 0x45, "Invalid PE file format: " + filename); // 'E'
fileFormatCheck(readByte() != 0x00, "Invalid PE file format: " + filename); // 0
@@ -125,26 +125,19 @@ public class PEFile {
PE_HEADER_OFFSET = COFF_HEADER_OFFSET + 20;
seek(COFF_HEADER_OFFSET);
- skip(2);
- /** Ecma 335, 25.2.2: "Number of sections; indicates size of the Section Table" */
- numOfSections = readShort();
- //trace("Number of sections = " + numOfSections);
- /** Ecma 335, 25.2.2: "Time and date the file was created in seconds since
- * January 1st 1970 00:00:00 or 0."
- */
+ /* start of PE file header, Sec. 25.2.2 in Partition II */
+ skip(2); // Machine (always 0x14c)
+ numOfSections = readShort(); // Number of sections; indicates size of the Section Table
Date timeStamp = new Date(readInt() * 1000L);
- //trace("Time stamp = " + timeStamp);
-
- skip(2 * INT_SIZE);
+ skip(2 * INT_SIZE); // skip Pointer to Symbol Table (always 0) and Number of Symbols (always 0)
optHeaderSize = readShort();
int characteristics = readShort();
isDLL = (characteristics & 0x2000) != 0;
- //trace("Characteristics = " + Integer.toHexString(characteristics));
seek(PE_HEADER_OFFSET + 208); // p.157, Partition II
- CLI_RVA = readInt();
+ CLI_RVA = readInt(); // called "Data Directory Table" in Ch. 4 of Expert IL book
CLI_Length = readInt();
//trace("CLI_RVA = 0x" + Table.int2hex(CLI_RVA));
//trace("CLI_Length = 0x" + Table.int2hex(CLI_Length));
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
index c7899c7f54..2223a6db0f 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
@@ -402,16 +402,20 @@ import ILGenerator._
* sets the line of the source file corresponding to the next instruction
*/
def setPosition(line: Int) {
- if (line != 0)
- lineNums.put(lastLabel, Integer.toString(line))
+ if (line != 0) lineNums.put(lastLabel, Integer.toString(line))
}
def setPosition(line: Int, filename: String) {
- if (line != 0)
- lineNums.put(lastLabel, line + " '" + filename + "'")
+ if (line != 0) lineNums.put(lastLabel, line + " '" + filename + "'")
}
- def getLocals(): Array[LocalBuilder] = localList.toArray
+ def setPosition(startLine: Int, endLine: Int, startCol: Int, endCol: Int, filename: String) {
+ val lineRange = startLine + "," + endLine
+ val colRange = startCol + "," + endCol
+ lineNums.put(lastLabel, lineRange + ":" + colRange + " '" + filename + "'")
+ }
+
+ def getLocals(): Array[LocalBuilder] = localList.toArray
def getLabelIterator() = labelList.iterator
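The new setPosition overload encodes a source range as startLine,endLine:startCol,endCol 'filename'. A tiny sketch of that formatting, just to make the emitted string visible:

    object PositionFormatSketch {
      // mirrors the string built by the setPosition(startLine, endLine, startCol, endCol, filename) overload
      def format(startLine: Int, endLine: Int, startCol: Int, endCol: Int, filename: String): String =
        startLine + "," + endLine + ":" + startCol + "," + endCol + " '" + filename + "'"

      def main(args: Array[String]): Unit =
        println(format(3, 7, 1, 10, "Foo.scala")) // 3,7:1,10 'Foo.scala'
    }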
diff --git a/test/files/jvm/JavaInteraction.check b/test/disabled/jvm/JavaInteraction.check
index fb9d3cdd8c..fb9d3cdd8c 100644
--- a/test/files/jvm/JavaInteraction.check
+++ b/test/disabled/jvm/JavaInteraction.check
diff --git a/test/files/jvm/JavaInteraction.scala b/test/disabled/jvm/JavaInteraction.scala
index 26fbc17e58..26fbc17e58 100644
--- a/test/files/jvm/JavaInteraction.scala
+++ b/test/disabled/jvm/JavaInteraction.scala
diff --git a/test/files/presentation/simple-tests.check b/test/disabled/presentation/simple-tests.check
index b90dfce77c..b90dfce77c 100644
--- a/test/files/presentation/simple-tests.check
+++ b/test/disabled/presentation/simple-tests.check
diff --git a/test/files/presentation/simple-tests.javaopts b/test/disabled/presentation/simple-tests.javaopts
index 4af888f9c2..4af888f9c2 100644
--- a/test/files/presentation/simple-tests.javaopts
+++ b/test/disabled/presentation/simple-tests.javaopts
diff --git a/test/files/presentation/simple-tests.opts b/test/disabled/presentation/simple-tests.opts
index 8529bbf1a0..8529bbf1a0 100644
--- a/test/files/presentation/simple-tests.opts
+++ b/test/disabled/presentation/simple-tests.opts
diff --git a/test/disabled/scalacheck/HashTrieSplit.scala b/test/disabled/scalacheck/HashTrieSplit.scala
new file mode 100644
index 0000000000..6b20efe12b
--- /dev/null
+++ b/test/disabled/scalacheck/HashTrieSplit.scala
@@ -0,0 +1,47 @@
+
+
+
+
+
+import collection._
+
+
+
+
+// checks whether hash tries split their iterators correctly
+// even after some elements have been traversed
+object Test {
+ def main(args: Array[String]) {
+ doesSplitOk
+ }
+
+ def doesSplitOk = {
+ val sz = 2000
+ var ht = new parallel.immutable.ParHashMap[Int, Int]
+ // println("creating trie")
+ for (i <- 0 until sz) ht += ((i + sz, i))
+ // println("created trie")
+ for (n <- 0 until (sz - 1)) {
+ // println("---------> n = " + n)
+ val pit = ht.parallelIterator
+ val pit2 = ht.parallelIterator
+ var i = 0
+ while (i < n) {
+ pit.next
+ pit2.next
+ i += 1
+ }
+ // println("splitting")
+ val pits = pit.split
+ val fst = pits(0).toSet
+ val snd = pits(1).toSet
+ val orig = pit2.toSet
+ if (orig.size != (fst.size + snd.size) || orig != (fst ++ snd)) {
+ println("Original: " + orig)
+ println("First: " + fst)
+ println("Second: " + snd)
+ assert(false)
+ }
+ }
+ }
+}
diff --git a/test/files/jvm/ticket4283/AbstractFoo.java b/test/files/jvm/ticket4283/AbstractFoo.java
new file mode 100644
index 0000000000..74f3827fe3
--- /dev/null
+++ b/test/files/jvm/ticket4283/AbstractFoo.java
@@ -0,0 +1,5 @@
+package test;
+
+/* package private */ class AbstractFoo {
+ public int t;
+}
diff --git a/test/files/jvm/ticket4283/ScalaBipp.scala b/test/files/jvm/ticket4283/ScalaBipp.scala
new file mode 100644
index 0000000000..36dea9f4de
--- /dev/null
+++ b/test/files/jvm/ticket4283/ScalaBipp.scala
@@ -0,0 +1,5 @@
+package test
+
+class ScalaBipp extends AbstractFoo {
+ def make: Option[ScalaBipp] = Option(this)
+}
diff --git a/test/files/jvm/ticket4283/Test.scala b/test/files/jvm/ticket4283/Test.scala
new file mode 100644
index 0000000000..9bbfaab928
--- /dev/null
+++ b/test/files/jvm/ticket4283/Test.scala
@@ -0,0 +1,4 @@
+
+object Test extends App {
+ val x = (new test.ScalaBipp).make.get.t // java.lang.IllegalAccessError: tried to access class test.AbstractFoo from class other.IllegalAccess$
+}
diff --git a/test/files/neg/t3115.scala b/test/files/neg/t3115.scala
index 4aeeb4063e..3888085593 100755
--- a/test/files/neg/t3115.scala
+++ b/test/files/neg/t3115.scala
@@ -1,5 +1,5 @@
object sc {
- @deprecated("") object Math
+ @deprecated("", "2.8.0") object Math
}
object Test {
diff --git a/test/files/neg/t3774.check b/test/files/neg/t3774.check
index 59c63c4ee8..ea35c50541 100644
--- a/test/files/neg/t3774.check
+++ b/test/files/neg/t3774.check
@@ -1,6 +1,6 @@
t3774.scala:4: error: overloaded method value ++ with alternatives:
- [B1 >: List[Int]](xs: scala.collection.TraversableOnce[((Int, Int), B1)])scala.collection.immutable.Map[(Int, Int),B1] <and>
- [B >: ((Int, Int), List[Int]),That](that: scala.collection.TraversableOnce[B])(implicit bf: scala.collection.generic.CanBuildFrom[scala.collection.immutable.Map[(Int, Int),List[Int]],B,That])That
+ [B1 >: List[Int]](xs: scala.collection.GenTraversableOnce[((Int, Int), B1)])scala.collection.immutable.Map[(Int, Int),B1] <and>
+ [B >: ((Int, Int), List[Int]),That](that: scala.collection.GenTraversableOnce[B])(implicit bf: scala.collection.generic.CanBuildFrom[scala.collection.immutable.Map[(Int, Int),List[Int]],B,That])That
cannot be applied to (scala.collection.immutable.IndexedSeq[((Int, Int), scala.collection.immutable.Range.Inclusive)])
Map[(Int,Int),List[Int]]() ++ (for(x <- 0 to 1 ; y <- 0 to 1) yield {(x,y)-> (0 to 1)})
^
diff --git a/test/files/pos/bug1071.scala b/test/files/pos/bug1071.scala
new file mode 100644
index 0000000000..59149a021b
--- /dev/null
+++ b/test/files/pos/bug1071.scala
@@ -0,0 +1,17 @@
+class C {
+ private val a = 0
+ def getA = a
+}
+
+class D(c: C) {
+ def a = c.getA
+}
+
+object Test {
+ implicit def c2d(c: C): D = new D(c)
+
+ val c = new C
+ (c: D).a // works
+ c.a // error
+}
+
diff --git a/test/files/pos/bug4275.scala b/test/files/pos/bug4275.scala
new file mode 100644
index 0000000000..1938aceadc
--- /dev/null
+++ b/test/files/pos/bug4275.scala
@@ -0,0 +1,13 @@
+object Test {
+ def f = "abc".count(_ > 'a')
+
+ class A {
+ private val count: Int = 0
+ }
+ class B extends A { }
+ object B {
+ implicit def b2seq(x: B): Seq[Int] = Nil
+
+ def f = (new B) count (_ > 0)
+ }
+}
diff --git a/test/files/pos/spec-List.scala b/test/files/pos/spec-List.scala
index e3055f3051..04ab7d1543 100644
--- a/test/files/pos/spec-List.scala
+++ b/test/files/pos/spec-List.scala
@@ -144,7 +144,7 @@ sealed trait List[@specialized +A] extends LinearSeq[A]
/** Create a new list which contains all elements of this list
* followed by all elements of Traversable `that'
*/
- override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
val b = bf(this)
if (b.isInstanceOf[ListBuffer[_]]) (this ::: xs.toList).asInstanceOf[That]
else super.++(xs)
diff --git a/test/files/pos/t2799.scala b/test/files/pos/t2799.scala
index fe93c0e301..7710cce26c 100644
--- a/test/files/pos/t2799.scala
+++ b/test/files/pos/t2799.scala
@@ -1 +1 @@
-@deprecated("hi mom") case class Bob ()
+@deprecated("hi mom", "") case class Bob ()
diff --git a/test/files/pos/t4402/A.scala b/test/files/pos/t4402/A.scala
new file mode 100644
index 0000000000..f43f0865f0
--- /dev/null
+++ b/test/files/pos/t4402/A.scala
@@ -0,0 +1,3 @@
+package ohmy
+
+class A extends other.Bar
diff --git a/test/files/pos/t4402/Bar.java b/test/files/pos/t4402/Bar.java
new file mode 100644
index 0000000000..edc00a5fd1
--- /dev/null
+++ b/test/files/pos/t4402/Bar.java
@@ -0,0 +1,7 @@
+package other;
+
+public class Bar extends test.Foo {
+ void createMeSth(test.Foo.Inner aaa) {
+ aaa.hello();
+ }
+}
diff --git a/test/files/pos/t4402/Foo.java b/test/files/pos/t4402/Foo.java
new file mode 100644
index 0000000000..585a5e0a2c
--- /dev/null
+++ b/test/files/pos/t4402/Foo.java
@@ -0,0 +1,8 @@
+package test;
+
+public abstract class Foo {
+ protected interface Inner {
+ public void hello();
+ }
+}
+
diff --git a/test/files/pos/t4432.scala b/test/files/pos/t4432.scala
new file mode 100644
index 0000000000..106312311a
--- /dev/null
+++ b/test/files/pos/t4432.scala
@@ -0,0 +1,42 @@
+object Main {
+ def foo1 = {
+ class A {
+ val x = {
+ lazy val cc = 1 //
+ cc
+ ()
+ }
+ }
+ new A
+ }
+
+ def foo2 = {
+ class B {
+ val x = {
+ object cc
+ cc
+ ()
+ }
+ }
+ new B
+ }
+
+ def foo3 = {
+ object C {
+ val x = {
+ lazy val cc = 1
+ cc
+ }
+ }
+ C
+ }
+
+ def foo4 = {
+ class D {
+ lazy val cc = 1
+ cc
+ }
+ new D
+ }
+
+}
diff --git a/test/files/run/pc-conversions.scala b/test/files/run/pc-conversions.scala
index 3121d82944..6be0f99355 100644
--- a/test/files/run/pc-conversions.scala
+++ b/test/files/run/pc-conversions.scala
@@ -7,7 +7,7 @@ import collection._
object Test {
def main(args: Array[String]) {
- // disabled
+ testConversions
}
def testConversions {
@@ -49,13 +49,18 @@ object Test {
assertToPar(parallel.immutable.ParHashSet(1 -> 3))
assertToParWoMap(immutable.Range(1, 10, 2))
+
+ // seq and par conversions again
+ assertSeqPar(parallel.mutable.ParArray(1, 2, 3))
}
+ def assertSeqPar[T](pc: parallel.ParIterable[T]) = pc.seq.par == pc
+
def assertSeq[T](pc: parallel.ParIterable[T]) = assert(pc.seq == pc)
- def assertPar[T, P <: Parallel](xs: Iterable[T]) = assert(xs == xs.par)
+ def assertPar[T, P <: Parallel](xs: GenIterable[T]) = assert(xs == xs.par)
- def assertToPar[K, V](xs: Traversable[(K, V)]) {
+ def assertToPar[K, V](xs: GenTraversable[(K, V)]) {
xs match {
case _: Seq[_] =>
assert(xs.toIterable.par == xs)
@@ -73,7 +78,7 @@ object Test {
assert(xs.par.toMap == xs.toMap)
}
- def assertToParWoMap[T](xs: Seq[T]) {
+ def assertToParWoMap[T](xs: GenSeq[T]) {
assert(xs.toIterable.par == xs.toIterable)
assert(xs.par.toIterable == xs.toIterable)
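The re-enabled test adds assertSeqPar, checking that going to a sequential view and back yields an equal parallel collection. A hedged standalone round-trip along the same lines, assuming 2.9-style parallel collections with element-based equality across .par/.seq:

    object SeqParRoundTrip {
      def main(args: Array[String]): Unit = {
        val xs = (1 to 10).toList
        val pxs = xs.par
        assert(pxs.seq == xs)       // sequential view is element-equal to the original
        assert(pxs.seq.par == pxs)  // and converting back preserves equality
        println("seq/par round-trip ok")
      }
    }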
diff --git a/test/files/run/t4426.scala b/test/files/run/t4426.scala
new file mode 100644
index 0000000000..1cbd42da25
--- /dev/null
+++ b/test/files/run/t4426.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc._
+
+object Test {
+ val x = {
+ val settings = new Settings()
+ settings.classpath.value = System.getProperty("java.class.path")
+
+ object cc extends Global(settings) {
+ object dummy
+
+ override def computePluginPhases() = {
+ super.computePluginPhases()
+ assert(dummy ne null, "Dummy not initialized")
+ }
+ }
+ new cc.Run
+ ()
+ }
+
+ def main(args: Array[String]): Unit = {
+
+ }
+}
+
diff --git a/test/files/run/t4459.scala b/test/files/run/t4459.scala
new file mode 100644
index 0000000000..6e1b871cbe
--- /dev/null
+++ b/test/files/run/t4459.scala
@@ -0,0 +1,12 @@
+import collection._
+
+object Test {
+ def main(args: Array[String]) {
+ for (i <- 0 until 2000) {
+ foo((0 until 10000).toSeq.par)
+ }
+ }
+
+ def foo(arg: GenSeq[_]): String = arg.map(x => x).mkString(",")
+}
+
diff --git a/test/files/scalacheck/HashTrieSplit.scala b/test/files/scalacheck/HashTrieSplit.scala
index 6b20efe12b..e959a3d535 100644
--- a/test/files/scalacheck/HashTrieSplit.scala
+++ b/test/files/scalacheck/HashTrieSplit.scala
@@ -23,8 +23,8 @@ object Test {
// println("created trie")
for (n <- 0 until (sz - 1)) {
// println("---------> n = " + n)
- val pit = ht.parallelIterator
- val pit2 = ht.parallelIterator
+ val pit = ht.splitter
+ val pit2 = ht.splitter
var i = 0
while (i < n) {
pit.next
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index 744d22f05c..ac3493e56c 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -71,11 +71,11 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
(inst, fromTraversable(inst), modif)
}
- def areEqual(t1: Traversable[T], t2: Traversable[T]) = if (hasStrictOrder) {
+ def areEqual(t1: GenTraversable[T], t2: GenTraversable[T]) = if (hasStrictOrder) {
t1 == t2 && t2 == t1
} else (t1, t2) match { // it is slightly delicate what `equal` means if the order is not strict
- case (m1: Map[_, _], m2: Map[_, _]) => m1 == m2 && m2 == m1
- case (i1: Iterable[_], i2: Iterable[_]) =>
+ case (m1: GenMap[_, _], m2: GenMap[_, _]) => m1 == m2 && m2 == m1
+ case (i1: GenIterable[_], i2: GenIterable[_]) =>
val i1s = i1.toSet
val i2s = i2.toSet
i1s == i2s && i2s == i1s
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index 4be7b0ec4d..103b5e2993 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -30,16 +30,6 @@ class ParCollProperties extends Properties("Parallel collections") {
// parallel vectors
include(immutable.IntParallelVectorCheck)
-
- /* Views */
-
- // parallel array views
-
- // parallel immutable hash map views
-
- // parallel mutable hash map views
-
- // parallel vector views
}
diff --git a/test/pending/run/t4283/AbstractFoo.java b/test/pending/run/t4283/AbstractFoo.java
new file mode 100644
index 0000000000..0403271b74
--- /dev/null
+++ b/test/pending/run/t4283/AbstractFoo.java
@@ -0,0 +1,6 @@
+package test;
+
+/* package private */ class AbstractFoo {
+ public int t = 1;
+ public int f() { return 2; }
+} \ No newline at end of file
diff --git a/test/pending/run/t4283/IllegalAccess.scala b/test/pending/run/t4283/IllegalAccess.scala
new file mode 100644
index 0000000000..12de7e4649
--- /dev/null
+++ b/test/pending/run/t4283/IllegalAccess.scala
@@ -0,0 +1,17 @@
+package other
+
+object IllegalAccess {
+ def main(args: Array[String]) {
+ val x = (new test.ScalaBipp).make.get.asInstanceOf[test.ScalaBipp].f()
+ println(x)
+ val y = (new test.ScalaBipp).make.get.f()
+ println(y)
+ val u = (new test.ScalaBipp).make.get.asInstanceOf[test.ScalaBipp].t
+ println(u)
+ val v = (new test.ScalaBipp).make.get.t
+ println(v)
+ val sb: test.ScalaBipp = (new test.ScalaBipp).make.get
+ val z = sb.t
+ println(z)
+ }
+}
diff --git a/test/pending/run/t4283/ScalaBipp.scala b/test/pending/run/t4283/ScalaBipp.scala
new file mode 100644
index 0000000000..36dea9f4de
--- /dev/null
+++ b/test/pending/run/t4283/ScalaBipp.scala
@@ -0,0 +1,5 @@
+package test
+
+class ScalaBipp extends AbstractFoo {
+ def make: Option[ScalaBipp] = Option(this)
+}
diff --git a/test/scaladoc/resources/Trac4452.scala b/test/scaladoc/resources/Trac4452.scala
new file mode 100644
index 0000000000..b844437dba
--- /dev/null
+++ b/test/scaladoc/resources/Trac4452.scala
@@ -0,0 +1,30 @@
+/**
+ * @define MacroWithNewLine
+ * This macro should not include "*".
+ *
+ * @define MacroWithoutNewLine This macro
+ * should
+ * include "*".
+ */
+class Test
+
+/**
+ * TestA class
+ */
+class Trac4452 extends Test {
+ /** $MacroWithNewLine */
+ def a = 1
+
+ /** $MacroWithoutNewLine */
+ def b = 2
+
+ /**
+ * $MacroWithNewLine
+ */
+ def c = 3
+
+ /**
+ * $MacroWithoutNewLine
+ */
+ def d = 4
+}
diff --git a/test/scaladoc/resources/Trac4471.scala b/test/scaladoc/resources/Trac4471.scala
new file mode 100644
index 0000000000..2a22887c0d
--- /dev/null
+++ b/test/scaladoc/resources/Trac4471.scala
@@ -0,0 +1,9 @@
+class A {
+ @deprecated def foo = 123
+ @deprecated def bar = 456
+}
+
+class B {
+ def foo = 123
+ @deprecated def bar = 456
+}
diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.scala b/test/scaladoc/scala/html/HtmlFactoryTest.scala
index c0a3f1ad95..cb31df3fb2 100644
--- a/test/scaladoc/scala/html/HtmlFactoryTest.scala
+++ b/test/scaladoc/scala/html/HtmlFactoryTest.scala
@@ -24,6 +24,7 @@ object Test extends Properties("HtmlFactory") {
import scala.tools.nsc.doc.{DocFactory, Settings}
import scala.tools.nsc.doc.model.IndexModelFactory
import scala.tools.nsc.doc.html.HtmlFactory
+ import scala.tools.nsc.doc.html.page.ReferenceIndex
def getClasspath = {
// these things can be tricky
@@ -59,6 +60,23 @@ object Test extends Properties("HtmlFactory") {
result
}
+ def createReferenceIndex(basename: String) = {
+ createFactory.makeUniverse(List("test/scaladoc/resources/"+basename)) match {
+ case Some(universe) => {
+ val index = IndexModelFactory.makeIndex(universe)
+ val pages = index.firstLetterIndex.map({
+ case (key, value) => {
+ val page = new ReferenceIndex(key, index, universe)
+ page.absoluteLinkTo(page.path) -> page.body
+ }
+ })
+ Some(pages)
+ }
+ case _ =>
+ None
+ }
+ }
+
def createTemplate(scala: String) = {
val html = scala.stripSuffix(".scala") + ".html"
createTemplates(scala)(html)
@@ -266,4 +284,26 @@ object Test extends Properties("HtmlFactory") {
case _ => false
}
}
+
+ property("Trac #4452") = {
+ createTemplate("Trac4452.scala") match {
+ case node: scala.xml.Node =>
+ ! node.toString.contains(">*")
+ case _ => false
+ }
+ }
+
+ property("Trac #4471") = {
+ createReferenceIndex("Trac4471.scala") match {
+ case Some(pages) =>
+ (pages.get("index/index-f.html") match {
+ case Some(node) => node.toString.contains(">A</a></strike>")
+ case _ => false
+ }) && (pages.get("index/index-b.html") match {
+ case Some(node) => node.toString.contains(">bar</strike>")
+ case _ => false
+ })
+ case _ => false
+ }
+ }
}
diff --git a/tools/deploy-local-maven-snapshot b/tools/deploy-local-maven-snapshot
new file mode 100755
index 0000000000..30f78cb110
--- /dev/null
+++ b/tools/deploy-local-maven-snapshot
@@ -0,0 +1,11 @@
+#!/bin/bash
+#
+# Install the -SNAPSHOT artifacts in the local maven cache.
+
+set -e
+
+cd $(dirname $0)/..
+
+ant fastdist distpack
+cd dists/maven/latest
+ant deploy.snapshot.local