author    Paul Phillips <paulp@improving.org>  2012-03-14 10:00:30 -0700
committer Paul Phillips <paulp@improving.org>  2012-03-14 10:08:36 -0700
commit    3ab383ae01a66208df4955bf2117dd2ea8eb2afe (patch)
tree      ac83e7e89310955ccbc317f57f34165f9781117c
parent    b6dde2b6cf5f66e96fc92c09c1fbe3b4a8dc348c (diff)
parent    5dca64cefeed4bc3289e641949b103e5e806aa32 (diff)
Merge branch 'master' into merge-inline
Conflicts:
	lib/scala-compiler.jar.desired.sha1
	lib/scala-library-src.jar.desired.sha1
	lib/scala-library.jar.desired.sha1
	src/compiler/scala/reflect/internal/Definitions.scala
	src/compiler/scala/reflect/internal/Importers.scala
	src/compiler/scala/reflect/internal/Symbols.scala
	src/compiler/scala/reflect/internal/Trees.scala
	src/compiler/scala/reflect/internal/Types.scala
	src/compiler/scala/tools/nsc/Global.scala
	src/compiler/scala/tools/nsc/transform/Erasure.scala
	src/compiler/scala/tools/nsc/transform/LiftCode.scala
	src/compiler/scala/tools/nsc/transform/UnCurry.scala
	src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
	src/compiler/scala/tools/nsc/typechecker/Typers.scala
	test/files/run/programmatic-main.check
	test/files/speclib/instrumented.jar.desired.sha1
-rw-r--r--  build.number | 5
-rw-r--r--  build.number.maven | 3
-rw-r--r--  build.xml | 203
-rw-r--r--  lib/forkjoin.jar.desired.sha1 | 2
-rwxr-xr-x  pull-binary-libs.sh | 12
-rw-r--r--  src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala | 4
-rw-r--r--  src/actors/scala/actors/scheduler/ForkJoinScheduler.scala | 9
-rw-r--r--  src/build/genprod.scala | 29
-rw-r--r--  src/build/maven/continuations-plugin-pom.xml | 12
-rw-r--r--  src/build/maven/jline-pom.xml | 12
-rw-r--r--  src/build/maven/maven-deploy.xml | 122
-rw-r--r--  src/build/maven/scala-compiler-pom.xml | 12
-rw-r--r--  src/build/maven/scala-dbc-pom.xml | 12
-rw-r--r--  src/build/maven/scala-library-pom.xml | 12
-rw-r--r--  src/build/maven/scala-partest-pom.xml | 12
-rw-r--r--  src/build/maven/scala-swing-pom.xml | 12
-rw-r--r--  src/build/maven/scalap-pom.xml | 12
-rw-r--r--  src/build/pack.xml | 38
-rw-r--r--  src/compiler/rootdoc.txt | 6
-rw-r--r--  src/compiler/scala/reflect/internal/AnnotationInfos.scala | 17
-rw-r--r--  src/compiler/scala/reflect/internal/BaseTypeSeqs.scala | 12
-rw-r--r--  src/compiler/scala/reflect/internal/ClassfileConstants.scala | 7
-rw-r--r--  src/compiler/scala/reflect/internal/Definitions.scala | 66
-rw-r--r--  src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala | 50
-rw-r--r--  src/compiler/scala/reflect/internal/Flags.scala | 13
-rw-r--r--  src/compiler/scala/reflect/internal/HasFlags.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Importers.scala | 308
-rw-r--r--  src/compiler/scala/reflect/internal/Kinds.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/NameManglers.scala | 27
-rw-r--r--  src/compiler/scala/reflect/internal/Names.scala | 25
-rw-r--r--  src/compiler/scala/reflect/internal/Phase.scala | 4
-rw-r--r--  src/compiler/scala/reflect/internal/Scopes.scala | 10
-rw-r--r--  src/compiler/scala/reflect/internal/StdNames.scala | 36
-rw-r--r--  src/compiler/scala/reflect/internal/SymbolTable.scala | 47
-rw-r--r--  src/compiler/scala/reflect/internal/Symbols.scala | 171
-rw-r--r--  src/compiler/scala/reflect/internal/TreeGen.scala | 30
-rw-r--r--  src/compiler/scala/reflect/internal/TreeInfo.scala | 4
-rw-r--r--  src/compiler/scala/reflect/internal/TreePrinters.scala | 46
-rw-r--r--  src/compiler/scala/reflect/internal/Trees.scala | 42
-rw-r--r--  src/compiler/scala/reflect/internal/Types.scala | 385
-rw-r--r--  src/compiler/scala/reflect/internal/pickling/UnPickler.scala | 20
-rw-r--r--  src/compiler/scala/reflect/internal/util/Collections.scala | 22
-rw-r--r--  src/compiler/scala/reflect/runtime/ConversionUtil.scala | 4
-rw-r--r--  src/compiler/scala/reflect/runtime/Mirror.scala | 24
-rw-r--r--  src/compiler/scala/reflect/runtime/SynchronizedOps.scala | 20
-rw-r--r--  src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala | 38
-rw-r--r--  src/compiler/scala/reflect/runtime/SynchronizedTypes.scala | 66
-rw-r--r--  src/compiler/scala/reflect/runtime/ToolBoxes.scala | 59
-rw-r--r--  src/compiler/scala/reflect/runtime/TreeBuildUtil.scala | 12
-rw-r--r--  src/compiler/scala/reflect/runtime/Universe.scala | 2
-rw-r--r--  src/compiler/scala/tools/ant/Scalac.scala | 2
-rw-r--r--  src/compiler/scala/tools/ant/Scaladoc.scala | 25
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-unix.tmpl | 4
-rw-r--r--  src/compiler/scala/tools/cmd/gen/AnyVals.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 234
-rw-r--r--  src/compiler/scala/tools/nsc/MacroContext.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/SubComponent.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/ast/NodePrinters.scala | 55
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Reifiers.scala | 761
-rw-r--r--  src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala | 75
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 115
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreePrinters.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 77
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 71
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Tokens.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Members.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Primitives.scala | 53
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 639
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 1138
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 329
-rw-r--r--  src/compiler/scala/tools/nsc/dependencies/Changes.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/CompilerControl.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/Global.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ILoop.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/IMain.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Imports.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Power.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ReplVals.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/io/Pickler.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatchSupport.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Matrix.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/matching/PatternBindings.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Patterns.scala | 59
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/Reporter.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolTable.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 32
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/AddInterfaces.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 98
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Flatten.scala | 40
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 76
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LazyVals.scala | 45
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 97
-rw-r--r--  src/compiler/scala/tools/nsc/transform/OverridingPairs.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 408
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 40
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 447
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 85
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 63
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 146
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 169
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 57
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala | 337
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 90
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 167
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 287
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 2
-rwxr-xr-x  src/compiler/scala/tools/nsc/util/DocStrings.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/util/ProxyReport.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/util/Statistics.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/util/WeakHashSet.scala | 60
-rw-r--r--  src/compiler/scala/tools/util/EditDistance.scala | 2
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 59
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 2
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala | 36
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala | 40
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java | 3829
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java | 1749
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java | 756
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java | 1590
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java | 113
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java | 31
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java | 81
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java | 85
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/package-info.java | 5
-rw-r--r--  src/library/rootdoc.txt (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt) | 0
-rw-r--r--  src/library/scala/AnyValCompanion.scala | 2
-rw-r--r--  src/library/scala/Enumeration.scala | 6
-rw-r--r--  src/library/scala/Function0.scala | 10
-rw-r--r--  src/library/scala/Function1.scala | 10
-rw-r--r--  src/library/scala/Function10.scala | 4
-rw-r--r--  src/library/scala/Function11.scala | 4
-rw-r--r--  src/library/scala/Function12.scala | 4
-rw-r--r--  src/library/scala/Function13.scala | 4
-rw-r--r--  src/library/scala/Function14.scala | 4
-rw-r--r--  src/library/scala/Function15.scala | 4
-rw-r--r--  src/library/scala/Function16.scala | 4
-rw-r--r--  src/library/scala/Function17.scala | 4
-rw-r--r--  src/library/scala/Function18.scala | 4
-rw-r--r--  src/library/scala/Function19.scala | 4
-rw-r--r--  src/library/scala/Function2.scala | 12
-rw-r--r--  src/library/scala/Function20.scala | 4
-rw-r--r--  src/library/scala/Function21.scala | 4
-rw-r--r--  src/library/scala/Function22.scala | 4
-rw-r--r--  src/library/scala/Function3.scala | 4
-rw-r--r--  src/library/scala/Function4.scala | 4
-rw-r--r--  src/library/scala/Function5.scala | 4
-rw-r--r--  src/library/scala/Function6.scala | 4
-rw-r--r--  src/library/scala/Function7.scala | 4
-rw-r--r--  src/library/scala/Function8.scala | 4
-rw-r--r--  src/library/scala/Function9.scala | 4
-rw-r--r--  src/library/scala/PartialFunction.scala | 10
-rw-r--r--  src/library/scala/Predef.scala | 3
-rw-r--r--  src/library/scala/Specializable.scala | 29
-rw-r--r--  src/library/scala/SpecializableCompanion.scala | 1
-rw-r--r--  src/library/scala/StringContext.scala | 29
-rw-r--r--  src/library/scala/Tuple2.scala | 2
-rw-r--r--  src/library/scala/annotation/elidable.scala | 55
-rw-r--r--  src/library/scala/collection/GenTraversableLike.scala | 2
-rw-r--r--  src/library/scala/collection/JavaConversions.scala | 12
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 2
-rw-r--r--  src/library/scala/collection/generic/MutableSortedSetFactory.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/BitSet.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala | 19
-rw-r--r--  src/library/scala/collection/immutable/IntMap.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 20
-rw-r--r--  src/library/scala/collection/immutable/LongMap.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 13
-rw-r--r--  src/library/scala/collection/immutable/RedBlack.scala | 7
-rw-r--r--  src/library/scala/collection/immutable/RedBlackTree.scala | 485
-rw-r--r--  src/library/scala/collection/immutable/TreeMap.scala | 101
-rw-r--r--  src/library/scala/collection/immutable/TreeSet.scala | 91
-rw-r--r--  src/library/scala/collection/mutable/AVLTree.scala | 26
-rw-r--r--  src/library/scala/collection/mutable/BasicNode.java | 20
-rw-r--r--  src/library/scala/collection/mutable/CNodeBase.java | 35
-rw-r--r--  src/library/scala/collection/mutable/Ctrie.scala | 1075
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 40
-rw-r--r--  src/library/scala/collection/mutable/Gen.java | 18
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 30
-rw-r--r--  src/library/scala/collection/mutable/INodeBase.java | 35
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/MainNode.java | 40
-rw-r--r--  src/library/scala/collection/mutable/SortedSet.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/TreeSet.scala | 14
-rw-r--r--  src/library/scala/collection/parallel/Combiner.scala | 33
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 469
-rw-r--r--  src/library/scala/collection/parallel/ParIterableViewLike.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParSeqLike.scala | 135
-rw-r--r--  src/library/scala/collection/parallel/ParSeqViewLike.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 61
-rw-r--r--  src/library/scala/collection/parallel/TaskSupport.scala | 37
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala | 244
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashMap.scala | 31
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashSet.scala | 30
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParRange.scala | 18
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParVector.scala | 7
-rw-r--r--  src/library/scala/collection/parallel/immutable/package.scala | 12
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala | 35
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParCtrie.scala | 193
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashMap.scala | 27
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala | 21
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashTable.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala | 12
-rw-r--r--  src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala | 8
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 31
-rw-r--r--  src/library/scala/concurrent/Awaitable.scala | 24
-rw-r--r--  src/library/scala/concurrent/Channel.scala | 1
-rw-r--r--  src/library/scala/concurrent/ConcurrentPackageObject.scala | 111
-rw-r--r--  src/library/scala/concurrent/DelayedLazyVal.scala | 5
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 132
-rw-r--r--  src/library/scala/concurrent/Future.scala | 492
-rw-r--r--  src/library/scala/concurrent/FutureTaskRunner.scala | 1
-rw-r--r--  src/library/scala/concurrent/JavaConversions.scala | 7
-rw-r--r--  src/library/scala/concurrent/ManagedBlocker.scala | 1
-rw-r--r--  src/library/scala/concurrent/Promise.scala | 132
-rw-r--r--  src/library/scala/concurrent/Scheduler.scala | 54
-rw-r--r--  src/library/scala/concurrent/Task.scala | 13
-rw-r--r--  src/library/scala/concurrent/TaskRunner.scala | 1
-rw-r--r--  src/library/scala/concurrent/TaskRunners.scala | 1
-rw-r--r--  src/library/scala/concurrent/ThreadPoolRunner.scala | 1
-rw-r--r--  src/library/scala/concurrent/default/SchedulerImpl.scala.disabled | 44
-rw-r--r--  src/library/scala/concurrent/default/TaskImpl.scala.disabled | 313
-rw-r--r--  src/library/scala/concurrent/impl/AbstractPromise.java | 21
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 139
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 89
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 258
-rw-r--r--  src/library/scala/concurrent/ops.scala | 1
-rw-r--r--  src/library/scala/concurrent/package.scala | 57
-rw-r--r--  src/library/scala/concurrent/package.scala.disabled | 108
-rw-r--r--  src/library/scala/package.scala | 6
-rw-r--r--  src/library/scala/reflect/ClassManifest.scala | 4
-rw-r--r--  src/library/scala/reflect/Code.scala | 23
-rw-r--r--  src/library/scala/reflect/Manifest.scala | 25
-rw-r--r--  src/library/scala/reflect/ReflectionUtils.scala | 4
-rw-r--r--  src/library/scala/reflect/api/Mirror.scala | 36
-rw-r--r--  src/library/scala/reflect/api/Modifier.scala | 83
-rwxr-xr-x  src/library/scala/reflect/api/Names.scala | 15
-rwxr-xr-x  src/library/scala/reflect/api/StandardDefinitions.scala | 19
-rw-r--r--  src/library/scala/reflect/api/StandardNames.scala | 21
-rwxr-xr-x  src/library/scala/reflect/api/Symbols.scala | 103
-rw-r--r--  src/library/scala/reflect/api/TreeBuildUtil.scala | 14
-rw-r--r--  src/library/scala/reflect/api/TreePrinters.scala | 24
-rw-r--r--  src/library/scala/reflect/api/Trees.scala | 142
-rwxr-xr-x  src/library/scala/reflect/api/Types.scala | 24
-rwxr-xr-x  src/library/scala/reflect/api/Universe.scala | 3
-rw-r--r--  src/library/scala/reflect/macro/Context.scala | 25
-rw-r--r--  src/library/scala/runtime/AbstractFunction1.scala | 2
-rw-r--r--  src/library/scala/runtime/NonLocalReturnControl.scala | 4
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 11
-rw-r--r--  src/library/scala/specialized.scala | 13
-rw-r--r--  src/library/scala/sys/process/BasicIO.scala | 128
-rw-r--r--  src/library/scala/sys/process/Process.scala | 67
-rw-r--r--  src/library/scala/sys/process/ProcessBuilder.scala | 306
-rw-r--r--  src/library/scala/sys/process/ProcessIO.scala | 49
-rw-r--r--  src/library/scala/sys/process/ProcessLogger.scala | 26
-rw-r--r--  src/library/scala/sys/process/package.scala | 212
-rw-r--r--  src/library/scala/util/Duration.scala | 485
-rw-r--r--  src/library/scala/util/Properties.scala | 2
-rw-r--r--  src/library/scala/util/Timeout.scala | 33
-rw-r--r--  src/library/scala/util/Try.scala | 165
-rw-r--r--  src/library/scala/util/parsing/combinator/Parsers.scala | 7
-rw-r--r--  src/manual/scala/tools/docutil/EmitManPage.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/CompilerTest.scala | 27
-rw-r--r--  src/partest/scala/tools/partest/DirectTest.scala | 32
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 12
-rw-r--r--  src/partest/scala/tools/partest/nest/CompileManager.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/nest/PathSettings.scala | 9
-rw-r--r--  src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 8
-rw-r--r--  src/partest/scala/tools/partest/nest/TestFile.scala | 4
-rw-r--r--  src/partest/scala/tools/partest/nest/Worker.scala | 4
-rw-r--r--  src/partest/scala/tools/partest/utils/CodeTest.scala | 41
-rw-r--r--  src/scalap/scala/tools/scalap/Classfiles.scala | 26
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/Rules.scala | 2
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala | 2
-rw-r--r--  test/benchmarking/ParCtrie-bfs.scala | 73
-rw-r--r--  test/benchmarking/ParCtrie-map.scala | 21
-rw-r--r--  test/benchmarking/ParCtrie-nums.scala | 39
-rw-r--r--  test/benchmarking/ParCtrie-size.scala | 34
-rw-r--r--  test/benchmarking/TreeSetInsert.scala | 2
-rw-r--r--  test/disabled/buildmanager/t2652/A.scala (renamed from test/files/buildmanager/t2652/A.scala) | 0
-rw-r--r--  test/disabled/buildmanager/t2652/B.scala (renamed from test/files/buildmanager/t2652/B.scala) | 0
-rw-r--r--  test/disabled/buildmanager/t2652/t2652.changes/A2.scala (renamed from test/files/buildmanager/t2652/t2652.changes/A2.scala) | 0
-rw-r--r--  test/disabled/buildmanager/t2652/t2652.check (renamed from test/files/buildmanager/t2652/t2652.check) | 0
-rw-r--r--  test/disabled/buildmanager/t2652/t2652.test (renamed from test/files/buildmanager/t2652/t2652.test) | 0
-rw-r--r--  test/disabled/jvm/scala-concurrent-tck-akka.scala | 391
-rw-r--r--  test/files/codelib/code.jar.desired.sha1 | 1
-rw-r--r--  test/files/continuations-neg/t5445.check | 4
-rw-r--r--  test/files/continuations-neg/t5445.scala | 5
-rw-r--r--  test/files/continuations-run/t5506.check | 7
-rw-r--r--  test/files/continuations-run/t5506.scala | 58
-rw-r--r--  test/files/continuations-run/t5538.check | 1
-rw-r--r--  test/files/continuations-run/t5538.scala | 50
-rw-r--r--  test/files/jvm/concurrent-future.check | 16
-rw-r--r--  test/files/jvm/concurrent-future.scala | 122
-rw-r--r--  test/files/jvm/scala-concurrent-tck.scala | 413
-rw-r--r--  test/files/jvm/serialization.check | 16
-rw-r--r--  test/files/jvm/serialization.scala | 12
-rw-r--r--  test/files/jvm/typerep.scala | 2
-rw-r--r--  test/files/neg/abstraction-from-volatile-type-error.check | 4
-rw-r--r--  test/files/neg/abstraction-from-volatile-type-error.scala | 11
-rw-r--r--  test/files/neg/constructor-prefix-error.check | 4
-rw-r--r--  test/files/neg/constructor-prefix-error.scala | 7
-rw-r--r--  test/files/neg/error_dependentMethodTpeConversionToFunction.check | 4
-rw-r--r--  test/files/neg/error_dependentMethodTpeConversionToFunction.scala | 5
-rw-r--r--  test/files/neg/error_tooManyArgsPattern.check | 4
-rw-r--r--  test/files/neg/error_tooManyArgsPattern.scala | 5
-rw-r--r--  test/files/neg/finitary-error.check | 4
-rw-r--r--  test/files/neg/finitary-error.scala | 3
-rw-r--r--  test/files/neg/implicits.check | 5
-rw-r--r--  test/files/neg/implicits.scala | 16
-rw-r--r--  test/files/neg/macro-argtype-mismatch.check | 6
-rw-r--r--  test/files/neg/macro-argtype-mismatch.flags | 1
-rw-r--r--  test/files/neg/macro-argtype-mismatch/Macros_1.scala | 3
-rw-r--r--  test/files/neg/macro-argtype-mismatch/Test_2.scala | 4
-rw-r--r--  test/files/neg/macro-noexpand.check | 4
-rw-r--r--  test/files/neg/macro-noexpand.flags | 1
-rw-r--r--  test/files/neg/macro-noexpand/Macros_1.scala | 3
-rw-r--r--  test/files/neg/macro-noexpand/Test_2.scala | 4
-rw-r--r--  test/files/neg/macro-noncompilertree.check | 6
-rw-r--r--  test/files/neg/macro-noncompilertree.flags | 1
-rw-r--r--  test/files/neg/macro-noncompilertree/Macros_1.scala | 3
-rw-r--r--  test/files/neg/macro-nontree.check | 6
-rw-r--r--  test/files/neg/macro-nontree.flags | 1
-rw-r--r--  test/files/neg/macro-nontree/Macros_1.scala | 3
-rw-r--r--  test/files/neg/parent-inherited-twice-error.check | 7
-rw-r--r--  test/files/neg/parent-inherited-twice-error.scala | 2
-rw-r--r--  test/files/neg/qualifying-class-error-1.check | 4
-rw-r--r--  test/files/neg/qualifying-class-error-1.scala | 2
-rw-r--r--  test/files/neg/qualifying-class-error-2.check | 4
-rw-r--r--  test/files/neg/qualifying-class-error-2.scala | 11
-rw-r--r--  test/files/neg/reify_ann2a.check | 4
-rw-r--r--  test/files/neg/reify_ann2a.scala | 30
-rw-r--r--  test/files/neg/reify_ann2b.check | 7
-rw-r--r--  test/files/neg/reify_ann2b.scala | 30
-rw-r--r--  test/files/neg/t200.check | 2
-rw-r--r--  test/files/neg/t2779.check | 2
-rw-r--r--  test/files/neg/t278.check | 2
-rw-r--r--  test/files/neg/t3275.check | 4
-rw-r--r--  test/files/neg/t3275.scala | 3
-rw-r--r--  test/files/neg/t414.check | 2
-rw-r--r--  test/files/neg/t452.check | 2
-rw-r--r--  test/files/neg/t4879.check | 4
-rw-r--r--  test/files/neg/t5189.check | 6
-rw-r--r--  test/files/neg/t5189.scala | 5
-rw-r--r--  test/files/neg/t5189b.check | 8
-rw-r--r--  test/files/neg/t5189b.scala | 62
-rw-r--r--  test/files/neg/t5358.check | 7
-rw-r--r--  test/files/neg/t5358.scala | 4
-rw-r--r--  test/files/neg/t5429.check | 132
-rw-r--r--  test/files/neg/t5429.scala | 93
-rw-r--r--  test/files/neg/t5452.check | 8
-rw-r--r--  test/files/neg/t5452.scala | 29
-rw-r--r--  test/files/neg/t5455.check | 4
-rw-r--r--  test/files/neg/t5455.scala | 16
-rw-r--r--  test/files/neg/t5493.check | 4
-rw-r--r--  test/files/neg/t5493.scala | 3
-rw-r--r--  test/files/neg/t5497.check | 4
-rw-r--r--  test/files/neg/t5497.scala | 5
-rw-r--r--  test/files/neg/t5529.check | 12
-rw-r--r--  test/files/neg/t5529.scala | 13
-rw-r--r--  test/files/neg/t5553_1.check | 54
-rw-r--r--  test/files/neg/t5553_1.scala | 34
-rw-r--r--  test/files/neg/t5553_2.check | 50
-rw-r--r--  test/files/neg/t5553_2.scala | 59
-rw-r--r--  test/files/neg/t5554.check | 67
-rw-r--r--  test/files/neg/t5554.scala | 39
-rw-r--r--  test/files/neg/t591.check | 2
-rw-r--r--  test/files/neg/t800.check | 6
-rw-r--r--  test/files/neg/t935.check | 5
-rw-r--r--  test/files/neg/t960.check | 7
-rw-r--r--  test/files/neg/t960.scala | 4
-rw-r--r--  test/files/neg/tailrec-2.check | 7
-rw-r--r--  test/files/neg/tailrec-2.scala | 3
-rw-r--r--  test/files/pos/Transactions.scala | 2
-rw-r--r--  test/files/pos/existentials-harmful.scala (renamed from test/pending/pos/existentials-harmful.scala) | 0
-rw-r--r--  test/files/pos/existentials.scala | 22
-rw-r--r--  test/files/pos/local-objects.scala (renamed from test/pending/pos/local-objects.scala) | 0
-rw-r--r--  test/files/pos/package-case.scala (renamed from test/pending/pos/package-case.scala) | 0
-rw-r--r--  test/files/pos/spec-Function1.scala | 2
-rw-r--r--  test/files/pos/spec-groups.scala | 65
-rw-r--r--  test/files/pos/specialize10.scala | 7
-rw-r--r--  test/files/pos/spurious-overload.scala | 32
-rw-r--r--  test/files/pos/t1957.scala (renamed from test/pending/pos/t1957.scala) | 0
-rw-r--r--  test/files/pos/t1987b/a.scala (renamed from test/pending/pos/t1987/a.scala) | 0
-rw-r--r--  test/files/pos/t1987b/b.scala (renamed from test/pending/pos/t1987/b.scala) | 0
-rw-r--r--  test/files/pos/t2194.scala (renamed from test/pending/pos/t2194.scala) | 0
-rw-r--r--  test/files/pos/t531.scala | 4
-rw-r--r--  test/files/pos/t532.scala | 4
-rw-r--r--  test/files/pos/t5406.scala | 4
-rw-r--r--  test/files/pos/t5444.scala | 42
-rw-r--r--  test/files/pos/t5541.scala | 61
-rw-r--r--  test/files/pos/t5546.scala | 1
-rw-r--r--  test/files/presentation/shutdown-deadlock.check | 3
-rw-r--r--  test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala | 45
-rw-r--r--  test/files/presentation/shutdown-deadlock/src/arrays.scala | 937
-rw-r--r--  test/files/run/code.check | 36
-rw-r--r--  test/files/run/code.scala | 60
-rw-r--r--  test/files/run/ctries/DumbHash.scala | 14
-rw-r--r--  test/files/run/ctries/Wrap.scala | 9
-rw-r--r--  test/files/run/ctries/concmap.scala | 188
-rw-r--r--  test/files/run/ctries/iterator.scala | 289
-rw-r--r--  test/files/run/ctries/lnode.scala | 61
-rw-r--r--  test/files/run/ctries/main.scala | 45
-rw-r--r--  test/files/run/ctries/snapshot.scala | 267
-rw-r--r--  test/files/run/elidable-opt.check | 14
-rw-r--r--  test/files/run/elidable-opt.flags | 1
-rw-r--r--  test/files/run/elidable-opt.scala | 85
-rw-r--r--  test/files/run/elidable.check | 15
-rw-r--r--  test/files/run/elidable.scala | 73
-rw-r--r--  test/files/run/existentials-in-compiler.check | 156
-rw-r--r--  test/files/run/existentials-in-compiler.scala | 83
-rw-r--r--  test/files/run/existentials3.check | 22
-rw-r--r--  test/files/run/existentials3.scala | 73
-rw-r--r--  test/files/run/interpolation.check | 6
-rw-r--r--  test/files/run/interpolation.scala | 6
-rw-r--r--  test/files/run/interpolationMultiline1.check | 26
-rw-r--r--  test/files/run/interpolationMultiline1.flags | 1
-rw-r--r--  test/files/run/interpolationMultiline1.scala | 26
-rw-r--r--  test/files/run/interpolationMultiline2.check | 26
-rw-r--r--  test/files/run/interpolationMultiline2.flags | 1
-rw-r--r--  test/files/run/interpolationMultiline2.scala | 21
-rw-r--r--  test/files/run/lub-visibility.check | 14
-rw-r--r--  test/files/run/lub-visibility.scala | 8
-rw-r--r--  test/files/run/macro-basic.check | 1
-rw-r--r--  test/files/run/macro-basic.flags | 1
-rw-r--r--  test/files/run/macro-basic/Macros_1.scala | 10
-rw-r--r--  test/files/run/macro-basic/Test_2.scala | 4
-rw-r--r--  test/files/run/macro-repl-basic.check | 25
-rw-r--r--  test/files/run/macro-repl-basic.scala | 18
-rw-r--r--  test/files/run/macro-repl-dontexpand.check | 9
-rw-r--r--  test/files/run/macro-repl-dontexpand.scala | 8
-rw-r--r--  test/files/run/macro-rettype-mismatch.check | 5
-rw-r--r--  test/files/run/macro-rettype-mismatch.flags | 1
-rw-r--r--  test/files/run/macro-rettype-mismatch/Macros_1.scala | 3
-rw-r--r--  test/files/run/macro-rettype-mismatch/Test_2.scala | 16
-rw-r--r--  test/files/run/manifests.scala | 19
-rw-r--r--  test/files/run/reflection-implClass.scala | 16
-rw-r--r--  test/files/run/reify_ann1a.check | 30
-rw-r--r--  test/files/run/reify_ann1a.scala | 30
-rw-r--r--  test/files/run/reify_ann1b.check | 30
-rw-r--r--  test/files/run/reify_ann1b.scala | 30
-rw-r--r--  test/files/run/reify_anonymous.scala | 3
-rw-r--r--  test/files/run/reify_classfileann_a.check | 18
-rw-r--r--  test/files/run/reify_classfileann_a.scala | 24
-rw-r--r--  test/files/run/reify_closure1.scala | 7
-rw-r--r--  test/files/run/reify_closure2a.scala | 7
-rw-r--r--  test/files/run/reify_closure3a.scala | 7
-rw-r--r--  test/files/run/reify_closure4a.scala | 7
-rw-r--r--  test/files/run/reify_closure5a.scala | 7
-rw-r--r--  test/files/run/reify_closure6.scala | 7
-rw-r--r--  test/files/run/reify_closure7.scala | 7
-rw-r--r--  test/files/run/reify_closure8a.scala | 3
-rw-r--r--  test/files/run/reify_closures10.scala | 3
-rw-r--r--  test/files/run/reify_complex.scala | 3
-rw-r--r--  test/files/run/reify_extendbuiltins.scala | 3
-rw-r--r--  test/files/run/reify_for1.scala | 3
-rw-r--r--  test/files/run/reify_fors.check (renamed from test/pending/run/reify_fors.check) | 0
-rw-r--r--  test/files/run/reify_fors.scala (renamed from test/pending/run/reify_fors.scala) | 5
-rw-r--r--  test/files/run/reify_generic.scala | 3
-rw-r--r--  test/files/run/reify_generic2.scala | 3
-rw-r--r--  test/files/run/reify_getter.scala | 3
-rw-r--r--  test/files/run/reify_implicits.scala | 3
-rw-r--r--  test/files/run/reify_inheritance.scala | 3
-rw-r--r--  test/files/run/reify_inner1.check | 1
-rw-r--r--  test/files/run/reify_inner1.scala | 21
-rw-r--r--  test/files/run/reify_inner2.check | 1
-rw-r--r--  test/files/run/reify_inner2.scala | 21
-rw-r--r--  test/files/run/reify_inner3.check | 1
-rw-r--r--  test/files/run/reify_inner3.scala | 21
-rw-r--r--  test/files/run/reify_inner4.check | 1
-rw-r--r--  test/files/run/reify_inner4.scala | 21
-rw-r--r--  test/files/run/reify_maps.check (renamed from test/pending/run/reify_maps.check) | 0
-rw-r--r--  test/files/run/reify_maps.scala (renamed from test/pending/run/reify_maps.scala) | 3
-rw-r--r--  test/files/run/reify_printf.scala | 3
-rw-r--r--  test/files/run/reify_sort.scala | 3
-rw-r--r--  test/files/run/reify_sort1.scala | 3
-rw-r--r--  test/files/run/reify_this.scala | 5
-rw-r--r--  test/files/run/reify_timeofday.check (renamed from test/pending/run/reify_timeofday.check) | 0
-rw-r--r--  test/files/run/reify_timeofday.scala (renamed from test/pending/run/reify_timeofday.scala) | 3
-rw-r--r--  test/files/run/reify_varargs.scala | 3
-rw-r--r--  test/files/run/spec-nlreturn.check | 2
-rw-r--r--  test/files/run/spec-nlreturn.scala | 16
-rw-r--r--  test/files/run/synchronized.check | 128
-rw-r--r--  test/files/run/synchronized.flags | 1
-rw-r--r--  test/files/run/synchronized.scala | 449
-rw-r--r--  test/files/run/t1195.check | 6
-rw-r--r--  test/files/run/t1195.scala | 26
-rw-r--r--  test/files/run/t2296a.check | 2
-rw-r--r--  test/files/run/t2296a/J.java | 7
-rw-r--r--  test/files/run/t2296a/S.scala | 18
-rw-r--r--  test/files/run/t2296b.check | 2
-rw-r--r--  test/files/run/t2296b/J_1.java | 7
-rw-r--r--  test/files/run/t2296b/S_2.scala | 18
-rw-r--r--  test/files/run/t3569.check | 17
-rw-r--r--  test/files/run/t3569.flags | 1
-rw-r--r--  test/files/run/t3569.scala | 32
-rw-r--r--  test/files/run/t3575.check | 20
-rw-r--r--  test/files/run/t3575.scala | 55
-rw-r--r--  test/files/run/t4147.scala (renamed from test/files/run/si4147.scala) | 0
-rw-r--r--  test/files/run/t4171.check | 3
-rw-r--r--  test/files/run/t4171.scala | 11
-rw-r--r--  test/files/run/t4542.check | 3
-rw-r--r--  test/files/run/t4770.check | 2
-rw-r--r--  test/files/run/t4770.scala | 15
-rw-r--r--  test/files/run/t4777.check | 2
-rw-r--r--  test/files/run/t4777.scala | 8
-rw-r--r--  test/files/run/t4875.check | 17
-rw-r--r--  test/files/run/t4875.scala | 12
-rw-r--r--  test/files/run/t5171.check | 1
-rw-r--r--  test/files/run/t5171.scala | 7
-rw-r--r--  test/files/run/t5224.check (renamed from test/pending/run/t5224.check) | 4
-rw-r--r--  test/files/run/t5224.scala | 9
-rw-r--r--  test/files/run/t5225_1.check | 4
-rw-r--r--  test/files/run/t5225_1.scala | 7
-rw-r--r--  test/files/run/t5225_2.check | 4
-rw-r--r--  test/files/run/t5225_2.scala | 7
-rw-r--r--  test/files/run/t5229_1.check (renamed from test/pending/run/t5229_1.check) | 0
-rw-r--r--  test/files/run/t5229_1.scala (renamed from test/pending/run/t5229_1.scala) | 3
-rw-r--r--  test/files/run/t5229_2.check | 2
-rw-r--r--  test/files/run/t5229_2.scala (renamed from test/pending/run/t5229_2.scala) | 3
-rw-r--r--  test/files/run/t5230.check | 2
-rw-r--r--  test/files/run/t5230.scala | 3
-rw-r--r--  test/files/run/t5258a.scala | 3
-rw-r--r--  test/files/run/t5262.check (renamed from test/files/run/si5262.check) | 0
-rw-r--r--  test/files/run/t5262.scala (renamed from test/files/run/si5262.scala) | 0
-rw-r--r--  test/files/run/t5266_1.check | 2
-rw-r--r--  test/files/run/t5266_1.scala | 3
-rw-r--r--  test/files/run/t5266_2.check | 2
-rw-r--r--  test/files/run/t5266_2.scala | 3
-rw-r--r--  test/files/run/t5269.scala | 3
-rw-r--r--  test/files/run/t5270.check (renamed from test/pending/run/t5270.check) | 0
-rw-r--r--  test/files/run/t5270.scala (renamed from test/pending/run/t5270.scala) | 3
-rw-r--r--  test/files/run/t5271_1.check | 11
-rw-r--r--  test/files/run/t5271_1.scala | 13
-rw-r--r--  test/files/run/t5271_2.check | 12
-rw-r--r--  test/files/run/t5271_2.scala | 15
-rw-r--r--  test/files/run/t5271_3.check | 19
-rw-r--r--  test/files/run/t5271_3.scala | 16
-rw-r--r--  test/files/run/t5271_4.check | 0
-rw-r--r--  test/files/run/t5271_4.scala | 13
-rw-r--r--  test/files/run/t5272_1.check | 1
-rw-r--r--  test/files/run/t5272_1.scala (renamed from test/pending/run/t5272.scala) | 3
-rw-r--r--  test/files/run/t5272_2.check | 1
-rw-r--r--  test/files/run/t5272_2.scala | 15
-rw-r--r--  test/files/run/t5273_1.check (renamed from test/pending/run/t5273_2.check) | 0
-rw-r--r--  test/files/run/t5273_1.scala (renamed from test/pending/run/t5273_2.scala) | 3
-rw-r--r--  test/files/run/t5273_2a.check | 1
-rw-r--r--  test/files/run/t5273_2a.scala | 14
-rw-r--r--  test/files/run/t5273_2b.check (renamed from test/pending/run/t5273_1.check) | 0
-rw-r--r--  test/files/run/t5273_2b.scala (renamed from test/pending/run/t5273_1.scala) | 3
-rw-r--r--  test/files/run/t5274_1.scala | 3
-rw-r--r--  test/files/run/t5274_2.scala | 3
-rw-r--r--  test/files/run/t5275.scala | 3
-rw-r--r--  test/files/run/t5276_1a.check | 1
-rw-r--r--  test/files/run/t5276_1a.scala (renamed from test/pending/run/t5276.scala) | 5
-rw-r--r--  test/files/run/t5276_1b.check | 1
-rw-r--r--  test/files/run/t5276_1b.scala | 14
-rw-r--r--  test/files/run/t5276_2a.check | 1
-rw-r--r--  test/files/run/t5276_2a.scala | 17
-rw-r--r--  test/files/run/t5276_2b.check | 1
-rw-r--r--  test/files/run/t5276_2b.scala | 18
-rw-r--r--  test/files/run/t5277_1.scala | 3
-rw-r--r--  test/files/run/t5277_2.scala | 3
-rw-r--r--  test/files/run/t5279.scala | 3
-rw-r--r--  test/files/run/t5293-map.scala | 88
-rw-r--r--  test/files/run/t5293.scala | 4
-rw-r--r--  test/files/run/t5334_1.check | 1
-rw-r--r--  test/files/run/t5334_1.scala (renamed from test/pending/run/t5334_1.scala) | 5
-rw-r--r--  test/files/run/t5334_2.check | 1
-rw-r--r--  test/files/run/t5334_2.scala (renamed from test/pending/run/t5334_2.scala) | 5
-rw-r--r--  test/files/run/t5335.scala | 3
-rw-r--r--  test/files/run/t5374.check (renamed from test/files/run/si5374.check) | 0
-rw-r--r--  test/files/run/t5374.scala (renamed from test/files/run/si5374.scala) | 0
-rw-r--r--  test/files/run/t5375.check (renamed from test/files/run/si5375.check) | 0
-rw-r--r--  test/files/run/t5375.scala (renamed from test/files/run/si5375.scala) | 0
-rw-r--r--  test/files/run/t5380.scala | 6
-rw-r--r--  test/files/run/t5419.check | 1
-rw-r--r--  test/files/run/t5419.scala | 9
-rw-r--r--  test/files/run/t5423.scala | 4
-rw-r--r--  test/files/run/t5488-fn.check | 17
-rw-r--r--  test/files/run/t5488-fn.scala | 27
-rw-r--r--  test/files/run/t5488.check | 14
-rw-r--r--  test/files/run/t5488.scala | 26
-rw-r--r--  test/files/run/t5500.check | 2
-rw-r--r--  test/files/run/t5500.scala | 12
-rw-r--r--  test/files/run/t5500b.check | 28
-rw-r--r--  test/files/run/t5500b.scala | 51
-rw-r--r--  test/files/run/t5527.check | 99
-rw-r--r--  test/files/run/t5527.scala | 107
-rw-r--r--  test/files/run/t5530.check | 2
-rw-r--r--  test/files/run/t5530.flags | 1
-rw-r--r--  test/files/run/t5530.scala | 4
-rw-r--r--  test/files/run/t5532.flags | 1
-rw-r--r--  test/files/run/t5532.scala | 4
-rw-r--r--  test/files/run/t5537.check | 20
-rw-r--r--  test/files/run/t5537.scala | 10
-rw-r--r--  test/files/run/t5545.check | 0
-rw-r--r--  test/files/run/t5545.scala | 27
-rw-r--r--  test/files/run/tailcalls.check | 3
-rw-r--r--  test/files/run/test-cpp.check | 73
-rw-r--r--  test/files/run/test-cpp.scala | 104
-rw-r--r--  test/files/run/virtpatmat_partial.check | 4
-rw-r--r--  test/files/run/virtpatmat_partial.scala | 23
-rw-r--r--  test/files/run/virtpatmat_switch.scala | 8
-rw-r--r--  test/files/run/virtpatmat_try.check | 2
-rw-r--r--  test/files/run/virtpatmat_try.flags | 1
-rw-r--r--  test/files/run/virtpatmat_try.scala | 47
-rw-r--r--  test/files/scalacheck/Ctrie.scala | 199
-rw-r--r--  test/files/scalacheck/avl.scala | 18
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala | 98
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala | 2
-rw-r--r--  test/files/scalacheck/parallel-collections/pc.scala | 3
-rw-r--r--  test/files/scalacheck/redblack.scala | 64
-rw-r--r--  test/files/scalacheck/redblacktree.scala | 216
-rw-r--r--  test/files/scalacheck/treemap.scala | 154
-rw-r--r--  test/files/scalacheck/treeset.scala | 152
-rw-r--r--  test/files/specialized/SI-5005.check | 33
-rw-r--r--  test/files/specialized/SI-5005.scala | 27
-rw-r--r--  test/files/specialized/arrays-traits.check | 8
-rw-r--r--  test/files/specialized/arrays-traits.scala | 17
-rw-r--r--  test/files/specialized/arrays.check | 6
-rw-r--r--  test/instrumented/boxes.patch | 29
-rw-r--r--  test/instrumented/library/scala/runtime/BoxesRunTime.java | 116
-rw-r--r--  test/instrumented/library/scala/runtime/ScalaRunTime.scala | 255
-rwxr-xr-x  test/instrumented/mkinstrumented | 46
-rwxr-xr-x  test/instrumented/mkinstrumented.sh | 51
-rw-r--r--  test/instrumented/srt.patch | 23
-rw-r--r--  test/pending/pos/bug4704.scala | 36
-rw-r--r--  test/pending/pos/inference.scala | 39
-rw-r--r--  test/pending/pos/nothing.scala | 24
-rw-r--r--  test/pending/pos/t1380/gnujaxp.jar.desired.sha1 | 1
-rw-r--r--  test/pending/pos/t1380/hallo.scala | 3
-rw-r--r--  test/pending/pos/t4012.scala | 7
-rw-r--r--  test/pending/pos/t4123.scala | 14
-rw-r--r--  test/pending/pos/t4436.scala | 3
-rw-r--r--  test/pending/pos/t4541.scala | 10
-rw-r--r--  test/pending/pos/t4717.scala | 7
-rw-r--r--  test/pending/pos/t4786.scala | 24
-rw-r--r--  test/pending/pos/t4787.scala | 4
-rw-r--r--  test/pending/pos/t4790.scala | 4
-rw-r--r--  test/pending/pos/t5259.scala | 14
-rw-r--r--  test/pending/pos/t5399.scala | 15
-rw-r--r--  test/pending/pos/t5400.scala | 14
-rw-r--r--  test/pending/pos/those-kinds-are-high.scala | 18
-rw-r--r--  test/pending/run/bug4704run.scala | 10
-rw-r--r--  test/pending/run/macro-overload.check | 4
-rw-r--r--  test/pending/run/macro-overload.flags | 1
-rw-r--r--  test/pending/run/macro-overload/Macros_1.scala | 9
-rw-r--r--  test/pending/run/macro-overload/Test_2.scala | 6
-rw-r--r--  test/pending/run/reify_addressbook.scala | 3
-rw-r--r--  test/pending/run/reify_brainf_ck.scala | 3
-rw-r--r--  test/pending/run/reify_callccinterpreter.scala | 3
-rw-r--r--  test/pending/run/reify_classfileann_b.check | 0
-rw-r--r--  test/pending/run/reify_classfileann_b.scala | 24
-rw-r--r--  test/pending/run/reify_closure2b.scala | 7
-rw-r--r--  test/pending/run/reify_closure3b.scala | 7
-rw-r--r--  test/pending/run/reify_closure4b.scala | 7
-rw-r--r--  test/pending/run/reify_closure5b.scala | 7
-rw-r--r--  test/pending/run/reify_closure8b.scala | 3
-rw-r--r--  test/pending/run/reify_closure9a.scala | 3
-rw-r--r--  test/pending/run/reify_closure9b.scala | 3
-rw-r--r--  test/pending/run/reify_closures11.scala | 3
-rw-r--r--  test/pending/run/reify_csv.scala | 3
-rw-r--r--  test/pending/run/reify_gadts.scala | 3
-rw-r--r--  test/pending/run/reify_lazyevaluation.scala | 3
-rw-r--r--  test/pending/run/reify_properties.scala | 3
-rw-r--r--  test/pending/run/reify_simpleinterpreter.scala | 3
-rw-r--r--  test/pending/run/t3702.scala | 10
-rw-r--r--  test/pending/run/t3705.scala | 17
-rw-r--r--  test/pending/run/t3832.scala | 7
-rw-r--r--  test/pending/run/t4098.scala | 9
-rw-r--r--  test/pending/run/t4415.scala | 86
-rw-r--r--  test/pending/run/t4460.scala | 12
-rw-r--r--  test/pending/run/t4511.scala | 10
-rw-r--r--  test/pending/run/t4511b.scala | 25
-rw-r--r--  test/pending/run/t4971.scala | 16
-rw-r--r--  test/pending/run/t4996.scala | 15
-rw-r--r--  test/pending/run/t5224.scala | 8
-rw-r--r--  test/pending/run/t5225_1.check | 4
-rw-r--r--  test/pending/run/t5225_1.scala | 8
-rw-r--r--  test/pending/run/t5225_2.check | 4
-rw-r--r--  test/pending/run/t5225_2.scala | 8
-rw-r--r--  test/pending/run/t5229_1_nolift.scala | 1
-rw-r--r--  test/pending/run/t5229_2.check | 2
-rw-r--r--  test/pending/run/t5258b.scala | 3
-rw-r--r--  test/pending/run/t5258c.scala | 3
-rw-r--r--  test/pending/run/t5271_1.scala | 3
-rw-r--r--  test/pending/run/t5271_2.scala | 3
-rw-r--r--  test/pending/run/t5271_3.check | 1
-rw-r--r--  test/pending/run/t5271_3.scala | 16
-rw-r--r--  test/pending/run/t5272.check | 1
-rw-r--r--  test/pending/run/t5276.check | 1
-rw-r--r--  test/pending/run/t5284.scala | 14
-rw-r--r--  test/pending/run/t5418.scala | 3
-rw-r--r--  test/scaladoc/resources/implicit-inheritance-override.scala | 41
-rw-r--r--  test/scaladoc/resources/implicit-inheritance-usecase.scala | 57
-rw-r--r--  test/scaladoc/scala/html/HtmlFactoryTest.scala | 189
-rwxr-xr-x  tools/binary-repo-lib.sh | 3
-rwxr-xr-x  tools/get-scala-revision | 5
-rwxr-xr-x  tools/verify-jar-cache | 33
741 files changed, 26906 insertions, 8890 deletions
diff --git a/build.number b/build.number
deleted file mode 100644
index 91c7e72c85..0000000000
--- a/build.number
+++ /dev/null
@@ -1,5 +0,0 @@
-#Tue Sep 11 19:21:09 CEST 2007
-version.minor=10
-version.patch=0
-version.suffix=alpha
-version.major=2
diff --git a/build.number.maven b/build.number.maven
new file mode 100644
index 0000000000..eed9f3897c
--- /dev/null
+++ b/build.number.maven
@@ -0,0 +1,3 @@
+version.major=2
+version.minor=10
+version.patch=0
diff --git a/build.xml b/build.xml
index 57d2eed1c0..0a3f7e782f 100644
--- a/build.xml
+++ b/build.xml
@@ -210,28 +210,33 @@ PROPERTIES
INITIALISATION
============================================================================ -->
- <condition property="starr.absent">
- <not><available file="${lib.dir}/scala-library.jar"/></not>
- </condition>
-
- <!-- It's such a PITA to perform the amazing && in ant, forget it.
- I'll just check the compiler jar. -->
- <target name="init.starr.check">
- <uptodate property="starr.compiler.uptodate"
- srcfile="${lib.dir}/scala-compiler.jar.desired.sha1"
- targetfile="${lib.dir}/scala-compiler.jar" />
+ <target name="init.jars.check">
+ <uptodate property="lib.jars.uptodate">
+ <srcfiles dir="${basedir}">
+ <include name="lib/**/*.desired.sha1"/>
+ <include name="test/files/**/*.desired.sha1"/>
+ <include name="tools/**/*.desired.sha1"/>
+ </srcfiles>
+ <mapper type="glob" from="*.desired.sha1" to="*"/>
+ </uptodate>
</target>
- <target name="init.starr" depends="init.starr.check" unless="starr.compiler.uptodate">
+ <target name="init.jars" depends="init.jars.check" unless="lib.jars.uptodate">
<echo level="warn" message="Updating bootstrap libs. (To do this by hand, run ./pull-binary-libs.sh)"/>
<exec osfamily="unix" vmlauncher="false" executable="./pull-binary-libs.sh" failifexecutionfails="true" />
<exec osfamily="windows" vmlauncher="false" executable="pull-binary-libs.sh" failifexecutionfails="true" />
<!-- uptodate task needs to know these are what's in the sha. -->
- <touch file="${lib.dir}/scala-library.jar" />
- <touch file="${lib.dir}/scala-compiler.jar" />
+ <touch>
+ <fileset dir="${basedir}">
+ <include name="lib/**/*.desired.sha1"/>
+ <include name="test/files/**/*.desired.sha1"/>
+ <include name="tools/**/*.desired.sha1"/>
+ </fileset>
+ <mapper type="glob" from="*.desired.sha1" to="*"/>
+ </touch>
</target>
- <target name="init" depends="init.starr">
+ <target name="init" depends="init.jars">
<!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
<property name="scalac.args.optimise" value=""/>
<!-- scalac.args.quickonly are added to quick.* targets but not others (particularly, locker.)
@@ -1505,20 +1510,33 @@ LIBRARIES (MSIL, FJBG maybe later)
DOCUMENTATION
============================================================================ -->
- <target name="docs.start" depends="pack.done"/>
+ <target name="docs.start" depends="pack.done">
+ <macrodef name="doc-uptodate-check">
+ <attribute name="name" />
+ <attribute name="srcdir" />
+ <element name="source-includes" optional="yes" />
+ <sequential>
+ <uptodate property="docs.@{name}.available" targetfile="${build-docs.dir}/@{name}.complete">
+ <srcfiles dir="@{srcdir}">
+ <source-includes/>
+ </srcfiles>
+ </uptodate>
+ </sequential>
+ </macrodef>
+ </target>
<target name="docs.pre-lib" depends="docs.start">
- <uptodate property="docs.lib.available" targetfile="${build-docs.dir}/library.complete">
- <srcfiles dir="${src.dir}">
+ <doc-uptodate-check name="library" srcdir="${src.dir}">
+ <source-includes>
<include name="library/**"/>
<include name="dbc/**"/>
<include name="actors/**"/>
<include name="swing/**"/>
- </srcfiles>
- </uptodate>
+ </source-includes>
+ </doc-uptodate-check>
</target>
- <target name="docs.lib" depends="docs.pre-lib" unless="docs.lib.available">
+ <target name="docs.lib" depends="docs.pre-lib" unless="docs.library.available">
<stopwatch name="docs.lib.timer"/>
<mkdir dir="${build-docs.dir}/library"/>
<scaladoc
@@ -1530,7 +1548,8 @@ DOCUMENTATION
docUncompilable="${src.dir}/library-aux"
sourcepath="${src.dir}"
classpathref="pack.classpath"
- docRootContent="${build-docs.dir}/library/lib/rootdoc.txt">
+ addparams="${scalac.args.all}"
+ docRootContent="${src.dir}/library/rootdoc.txt">
<src>
<files includes="${src.dir}/actors"/>
<files includes="${src.dir}/library/scala"/>
@@ -1552,12 +1571,10 @@ DOCUMENTATION
</target>
<target name="docs.pre-man" depends="docs.lib">
- <uptodate property="docs.man.available" targetfile="${build-docs.dir}/manual.complete">
- <srcfiles dir="${src.dir}/manual"/>
- </uptodate>
+ <doc-uptodate-check name="manual" srcdir="${src.dir}/manual"/>
</target>
- <target name="docs.manmaker" depends="docs.pre-man" unless="docs.man.available">
+ <target name="docs.manmaker" depends="docs.pre-man" unless="docs.manual.available">
<mkdir dir="${build.dir}/manmaker/classes"/>
<scalac
destdir="${build.dir}/manmaker/classes"
@@ -1571,7 +1588,7 @@ DOCUMENTATION
</path>
</target>
- <target name="docs.man" depends="docs.manmaker" unless="docs.man.available">
+ <target name="docs.man" depends="docs.manmaker" unless="docs.manual.available">
<mkdir dir="${build-docs.dir}/manual/man/man1"/>
<mkdir dir="${build-docs.dir}/manual/html"/>
<mkdir dir="${build-docs.dir}/manual/genman/man1"/>
@@ -1598,12 +1615,10 @@ DOCUMENTATION
</target>
<target name="docs.pre-comp" depends="docs.man">
- <uptodate property="docs.comp.available" targetfile="${build-docs.dir}/compiler.complete">
- <srcfiles dir="${src.dir}/compiler"/>
- </uptodate>
+ <doc-uptodate-check name="compiler" srcdir="${src.dir}/compiler"/>
</target>
- <target name="docs.comp" depends="docs.pre-comp" unless="docs.comp.available">
+ <target name="docs.comp" depends="docs.pre-comp" unless="docs.compiler.available">
<stopwatch name="docs.comp.timer"/>
<mkdir dir="${build-docs.dir}/compiler"/>
<scaladoc
@@ -1613,15 +1628,105 @@ DOCUMENTATION
docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1"
sourcepath="${src.dir}"
classpathref="pack.classpath"
- srcdir="${src.dir}/compiler">
+ srcdir="${src.dir}/compiler"
+ docRootContent="${src.dir}/compiler/rootdoc.txt"
+ addparams="${scalac.args.all}">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/compiler.complete" verbose="no"/>
<stopwatch name="docs.comp.timer" action="total"/>
</target>
+ <target name="docs.pre-jline" depends="docs.start">
+ <doc-uptodate-check name="jline" srcdir="${src.dir}/jline/src/main/java" />
+ </target>
+
+ <target name="docs.jline" depends="docs.pre-jline" unless="docs.jline.available">
+ <stopwatch name="docs.jline.timer"/>
+ <mkdir dir="${build-docs.dir}/jline"/>
+ <scaladoc
+ destdir="${build-docs.dir}/jline"
+ doctitle="Scala JLine"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="pack.classpath"
+ srcdir="${src.dir}/jline/src/main/java"
+ addparams="${scalac.args.all}">
+ <include name="**/*.scala"/>
+ <include name="**/*.java"/>
+ </scaladoc>
+ <touch file="${build-docs.dir}/jline.complete" verbose="no"/>
+ <stopwatch name="docs.jline.timer" action="total"/>
+ </target>
+
+ <target name="docs.pre-scalap" depends="docs.start">
+ <doc-uptodate-check name="scalap" srcdir="${src.dir}/scalap" />
+ </target>
+
+ <target name="docs.scalap" depends="docs.pre-scalap" unless="docs.scalap.available">
+ <stopwatch name="docs.scalap.timer"/>
+ <mkdir dir="${build-docs.dir}/scalap"/>
+ <scaladoc
+ destdir="${build-docs.dir}/scalap"
+ doctitle="Scalap"
+ docversion="${version.number}"
+ docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1"
+ sourcepath="${src.dir}"
+ classpathref="pack.classpath"
+ srcdir="${src.dir}/scalap"
+ addparams="${scalac.args.all}">
+ <include name="**/*.scala"/>
+ </scaladoc>
+ <touch file="${build-docs.dir}/scalap.complete" verbose="no"/>
+ <stopwatch name="docs.scalap.timer" action="total"/>
+ </target>
+
+ <target name="docs.pre-partest" depends="docs.start">
+ <doc-uptodate-check name="partest" srcdir="${src.dir}/partest" />
+ </target>
+
+ <target name="docs.partest" depends="docs.pre-partest" unless="docs.partest.available">
+ <stopwatch name="docs.partest.timer"/>
+ <mkdir dir="${build-docs.dir}/scala-partest"/>
+ <scaladoc
+ destdir="${build-docs.dir}/scala-partest"
+ doctitle="Scala Parallel Testing Framework"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="pack.classpath"
+ srcdir="${src.dir}/partest"
+ addparams="${scalac.args.all}">
+ <include name="**/*.scala"/>
+ </scaladoc>
+ <touch file="${build-docs.dir}/partest.complete" verbose="no"/>
+ <stopwatch name="docs.partest.timer" action="total"/>
+ </target>
+
+ <target name="docs.pre-continuations-plugin" depends="docs.start">
+ <doc-uptodate-check name="continuations-plugin" srcdir="${src.dir}/continuations/plugin" />
+ </target>
+
+ <target name="docs.continuations-plugin" depends="docs.pre-continuations-plugin" unless="docs.continuations-plugin.available">
+ <stopwatch name="docs.continuations-plugin.timer"/>
+ <mkdir dir="${build-docs.dir}/continuations-plugin"/>
+ <scaladoc
+ destdir="${build-docs.dir}/continuations-plugin"
+ doctitle="Delimited Continuations Compiler Plugin"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="pack.classpath"
+ srcdir="${src.dir}/continuations/plugin"
+ addparams="${scalac.args.all}">
+ <include name="**/*.scala"/>
+ </scaladoc>
+ <touch file="${build-docs.dir}/continuations-plugin.complete" verbose="no"/>
+ <stopwatch name="docs.continuations-plugin.timer" action="total"/>
+ </target>
+
<target name="docs.done" depends="docs.man"/>
+ <target name="docs.all" depends="docs.jline, docs.comp, docs.man, docs.lib, docs.scalap, docs.partest, docs.continuations-plugin"/>
+
<target name="docs.clean">
<delete dir="${build-docs.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
<delete dir="${build.dir}/manmaker" includeemptydirs="yes" quiet="yes" failonerror="no"/>
@@ -1905,18 +2010,41 @@ STABLE REFERENCE (STARR)
<include name="forkjoin.jar"/>
</fileset>
</copy>
+ <!-- remove SHA1 files for no starr, so we don't lose artifacts. -->
+ <delete>
+ <fileset dir="${lib.dir}">
+ <include name="fjbg.jar.desired.sha1"/>
+ <include name="msil.jar.desired.sha1"/>
+ <include name="forkjoin.jar.desired.sha1"/>
+ </fileset>
+ </delete>
</target>
- <target name="starr.done" depends="starr.libs"/>
+ <target name="starr.removesha1" depends="starr.libs">
+ <!-- remove SHA1 files for no starr, so we don't lose artifacts. -->
+ <delete>
+ <fileset dir="${lib.dir}">
+ <include name="scala-compiler.jar.desired.sha1"/>
+ <include name="scala-library.jar.desired.sha1"/>
+ <include name="scala-library-src.jar.desired.sha1"/>
+ </fileset>
+ </delete>
+ </target>
+
+ <target name="starr.done" depends="starr.libs, starr.removesha1"/>
<!-- ===========================================================================
FORWARDED TARGETS FOR PACKAGING
============================================================================ -->
- <target name="distpack" depends="dist.done">
+ <target name="distpack" depends="dist.done, docs.all">
<ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
</target>
+ <target name="distpack-maven" depends="dist.done, docs.all">
+ <ant antfile="${src.dir}/build/pack.xml" target="pack-maven.done" inheritall="yes" inheritrefs="yes"/>
+ </target>
+
<target name="distpack-opt"
description="Builds an optimised distribution.">
<antcall target="distpack">
@@ -1924,6 +2052,13 @@ FORWARDED TARGETS FOR PACKAGING
</antcall>
</target>
+ <target name="distpack-maven-opt"
+ description="Builds an optimised maven distribution.">
+ <antcall target="distpack-maven">
+ <param name="scalac.args.optimise" value="-optimise"/>
+ </antcall>
+ </target>
+
<!-- Used by the scala-installer script -->
<target name="allallclean" depends="all.clean"/>
@@ -1937,7 +2072,7 @@ FORWARDED TARGETS FOR NIGHTLY BUILDS
</antcall>
</target>
- <target name="nightly-nopt" depends="all.done">
+ <target name="nightly-nopt" depends="all.done, docs.all">
<!-- cannot antcall all.done, the properties defined in there (dist.dir) are not returned. need depends. -->
<ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
</target>
diff --git a/lib/forkjoin.jar.desired.sha1 b/lib/forkjoin.jar.desired.sha1
index 758ecb4baf..b8c48df830 100644
--- a/lib/forkjoin.jar.desired.sha1
+++ b/lib/forkjoin.jar.desired.sha1
@@ -1 +1 @@
-12c479a33ee283599fdb7aa91d6a1df0197a52cf ?forkjoin.jar
+e29a62ba3abe56ba004b344e22be86dbeb12176f ?forkjoin.jar
diff --git a/pull-binary-libs.sh b/pull-binary-libs.sh
index 65d95908a3..6c94e39fe7 100755
--- a/pull-binary-libs.sh
+++ b/pull-binary-libs.sh
@@ -2,6 +2,18 @@
#
# Script to pull binary artifacts for scala from the remote repository.
+# Avoid corrupting the jar cache in ~/.sbt and the ugly crash when curl is not installed
+# This affects Linux systems mostly, because wget is the default download tool and curl
+# is not installed at all.
+curl --version &> /dev/null
+if [ $? -ne 0 ]
+then
+ echo ""
+ echo "Please install curl to download the jar files necessary for building Scala."
+ echo ""
+ exit 1
+fi
+
. $(dirname $0)/tools/binary-repo-lib.sh
# TODO - argument parsing...
diff --git a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
index 257fe92a91..15ce60566a 100644
--- a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
+++ b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
@@ -4,9 +4,9 @@ package scheduler
import java.util.Collection
import scala.concurrent.forkjoin.{ForkJoinPool, ForkJoinTask}
-private class DrainableForkJoinPool extends ForkJoinPool {
+private class DrainableForkJoinPool(parallelism: Int, maxPoolSize: Int) extends ForkJoinPool(parallelism, ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true) {
- override def drainTasksTo(c: Collection[ForkJoinTask[_]]): Int =
+ override def drainTasksTo(c: Collection[ _ >: ForkJoinTask[_]]): Int =
super.drainTasksTo(c)
}
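The change above tracks the updated jsr166y ForkJoinPool API: parallelism, worker-thread factory, uncaught-exception handler, and async mode are now constructor arguments rather than post-construction setter calls. A minimal Scala sketch of the new construction; the parallelism value 4 is an arbitrary example, not from this commit:

  import scala.concurrent.forkjoin.ForkJoinPool

  // Arguments: parallelism level, worker-thread factory, uncaught-exception
  // handler (null = none), and asyncMode (true = locally FIFO scheduling,
  // which replaces the old p.setAsyncMode(true) call).
  val pool = new ForkJoinPool(4, ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true)

The new pool has no setMaximumPoolSize equivalent, which is why the max-pool-size logging disappears from ForkJoinScheduler in the next file.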
diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
index ba0f88c668..ce67ffd037 100644
--- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
@@ -38,13 +38,8 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
}
private def makeNewPool(): DrainableForkJoinPool = {
- val p = new DrainableForkJoinPool()
- // enable locally FIFO scheduling mode
- p.setAsyncMode(true)
- p.setParallelism(initCoreSize)
- p.setMaximumPoolSize(maxSize)
+ val p = new DrainableForkJoinPool(initCoreSize, maxSize)
Debug.info(this+": parallelism "+p.getParallelism())
- Debug.info(this+": max pool size "+p.getMaximumPoolSize())
p
}
@@ -144,7 +139,7 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
def block = blocker.block()
def isReleasable() = blocker.isReleasable
- }, true)
+ })
}
/** Suspends the scheduler. All threads that were in use by the
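
The two hunks above track the forkjoin.jar update earlier in this commit: the pool is now configured entirely through its constructor, so the old setAsyncMode/setParallelism/setMaximumPoolSize calls are gone. A minimal sketch against the JDK's java.util.concurrent.ForkJoinPool, which exposes the same four-argument constructor as the bundled scala.concurrent.forkjoin copy; the parallelism value is illustrative:

    import java.util.concurrent.ForkJoinPool

    object PoolConfig {
      val parallelism = 4 // stand-in for initCoreSize above

      // Parallelism, thread factory, exception handler and async
      // (locally FIFO) mode are all fixed at construction time.
      val pool = new ForkJoinPool(
        parallelism,
        ForkJoinPool.defaultForkJoinWorkerThreadFactory,
        null, // no custom Thread.UncaughtExceptionHandler
        true) // asyncMode = true, replacing the old setAsyncMode(true)

      def main(args: Array[String]): Unit = {
        println("parallelism: " + pool.getParallelism)
        pool.shutdown()
      }
    }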
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index 9e5b6810c1..cce00321df 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -97,7 +97,7 @@ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz */
object FunctionZero extends Function(0) {
override def genprodString = "\n// genprod generated these sources at: " + new java.util.Date()
override def covariantSpecs = "@specialized "
- override def descriptiveComment = functionNTemplate.format("javaVersion", "anonfun0",
+ override def descriptiveComment = " " + functionNTemplate.format("javaVersion", "anonfun0",
"""
* val javaVersion = () => sys.props("java.version")
*
@@ -111,10 +111,10 @@ object FunctionZero extends Function(0) {
object FunctionOne extends Function(1) {
override def classAnnotation = "@annotation.implicitNotFound(msg = \"No implicit view available from ${T1} => ${R}.\")\n"
- override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double) "
- override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
+ override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) "
+ override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) "
- override def descriptiveComment = functionNTemplate.format("succ", "anonfun1",
+ override def descriptiveComment = " " + functionNTemplate.format("succ", "anonfun1",
"""
* val succ = (x: Int) => x + 1
* val anonfun1 = new Function1[Int, Int] {
@@ -146,7 +146,7 @@ object FunctionTwo extends Function(2) {
override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Double) "
override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
- override def descriptiveComment = functionNTemplate.format("max", "anonfun2",
+ override def descriptiveComment = " " + functionNTemplate.format("max", "anonfun2",
"""
* val max = (x: Int, y: Int) => if (x < y) y else x
*
@@ -175,14 +175,20 @@ class Function(val i: Int) extends Group("Function") with Arity {
*
* {{{
* object Main extends App { %s }
- * }}}"""
+ * }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+ """
def toStr() = "\"" + ("<function%d>" format i) + "\""
def apply() = {
<file name={fileName}>{header}
/** A function of {i} parameter{s}.
- * {descriptiveComment}
+ *{descriptiveComment}
*/
{classAnnotation}trait {className}{contraCoArgs} extends AnyRef {{ self =>
/** Apply the body of this function to the argument{s}.
@@ -211,12 +217,11 @@ class Function(val i: Int) extends Group("Function") with Arity {
)
// f(x1,x2,x3,x4,x5,x6) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)
- def curryComment = { """
- /** Creates a curried version of this function.
+ def curryComment = {
+"""/** Creates a curried version of this function.
*
* @return a function `f` such that `f%s == apply%s`
- */
-""".format(xdefs map ("(" + _ + ")") mkString, commaXs)
+ */""".format(xdefs map ("(" + _ + ")") mkString, commaXs)
}
def tupleMethod = {
@@ -272,7 +277,7 @@ object TupleOne extends Tuple(1)
object TupleTwo extends Tuple(2)
{
override def imports = Tuple.zipImports
- override def covariantSpecs = "@specialized(Int, Long, Double) "
+ override def covariantSpecs = "@specialized(Int, Long, Double, Char, Boolean, AnyRef) "
override def moreMethods = """
/** Swaps the elements of this `Tuple`.
* @return a new Tuple where the first element is the second element of this Tuple and the
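
Adding scala.AnyRef to the @specialized lists above (for Function1 and Tuple2) is what the Specializable/AnyRefModule machinery further down in Definitions.scala enables. A minimal sketch of what such an annotation means on a user-level trait; Fn1 and the demo names are illustrative, not the generated sources:

    // A cut-down Function1 analogue. For each specialized combination the
    // compiler emits an extra apply variant (e.g. apply$mcII$sp for
    // Int => Int), so calls on primitives avoid boxing; AnyRef in the
    // list additionally produces a reference-specialized variant.
    trait Fn1[@specialized(Int, AnyRef) -A, @specialized(Int, AnyRef) +B] {
      def apply(a: A): B
    }

    object SpecDemo {
      val succ: Fn1[Int, Int] = new Fn1[Int, Int] {
        def apply(a: Int): Int = a + 1 // lands in the Int/Int specialization
      }
      def main(args: Array[String]): Unit =
        println(succ(5)) // 6
    }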
diff --git a/src/build/maven/continuations-plugin-pom.xml b/src/build/maven/continuations-plugin-pom.xml
index 0277b899ed..aca519b87e 100644
--- a/src/build/maven/continuations-plugin-pom.xml
+++ b/src/build/maven/continuations-plugin-pom.xml
@@ -6,6 +6,8 @@
<artifactId>continuations</artifactId>
<packaging>jar</packaging>
<version>@VERSION@</version>
+ <name>Scala Continuations Plugin</name>
+ <description>Delimited continuations compilation for Scala</description>
<url>http://www.scala-lang.org/</url>
<inceptionYear>2010</inceptionYear>
<organization>
@@ -48,4 +50,14 @@
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
</distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/maven/jline-pom.xml b/src/build/maven/jline-pom.xml
index 0c96c1374e..4752deb5e0 100644
--- a/src/build/maven/jline-pom.xml
+++ b/src/build/maven/jline-pom.xml
@@ -6,6 +6,8 @@
<artifactId>jline</artifactId>
<packaging>jar</packaging>
<version>@VERSION@</version>
+ <name>jline</name>
+ <description>Like readline, but better</description>
<url>http://www.scala-lang.org/</url>
<inceptionYear>2011</inceptionYear>
<organization>
@@ -54,4 +56,14 @@
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
</distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 9fddca3c73..2e490163e0 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -9,15 +9,12 @@
<!-- Pull in properties from build -->
<property file="build.properties" />
<!-- Initialize specific properties -->
- <!--<property name="remote.snapshot.repository" value="http://scala-tools.org:8081/nexus/content/repositories/snapshots" />
- <property name="remote.release.repository" value="http://scala-tools.org:8081/nexus/content/repositories/releases" />-->
-
- <property name="remote.snapshot.repository" value="http://nexus.scala-tools.org/content/repositories/snapshots" />
- <property name="remote.release.repository" value="http://nexus.scala-tools.org/content/repositories/releases" />
+ <property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
+ <property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
<property name="local.snapshot.repository" value="${user.home}/.m2/repository" />
<property name="local.release.repository" value="${user.home}/.m2/repository" />
- <property name="repository.credentials.id" value="scala-tools.org" />
+ <property name="repository.credentials.id" value="sonatype-nexus" />
<property name="settings.file" value="${user.home}/.m2/settings.xml" />
<echo>Using server[${repository.credentials.id}] for maven repository credentials.
@@ -30,6 +27,7 @@
<!-- Add our maven ant tasks -->
<path id="maven-ant-tasks.classpath" path="maven-ant-tasks-2.1.1.jar" />
<typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
+
<!-- simplify fixing pom versions -->
<macrodef name="make-pom">
<attribute name="name" />
@@ -60,13 +58,6 @@
<artifact:pom id="plugin-@{name}.pom" file="plugins/@{name}/@{name}-pom-fixed.xml" />
</sequential>
</macrodef>
- <!-- Simply attaching documentation -->
- <macrodef name="attach-doc">
- <attribute name="name" />
- <sequential>
- <artifact:attach type="jar" file="@{name}/@{name}-docs.jar" classifier="javadoc" />
- </sequential>
- </macrodef>
</target>
<!-- macros for local deployment -->
<target name="deploy.local.init" depends="init.maven">
@@ -82,6 +73,7 @@
<artifact:pom refid="@{name}.pom" />
<artifact:localRepository path="@{repository}" id="${repository.credentials.id}" />
<artifact:attach type="jar" file="@{name}/@{name}-src.jar" classifier="sources" />
+ <artifact:attach type="jar" file="@{name}/@{name}-docs.jar" classifier="javadoc" />
<extra-attachments />
</artifact:install>
</sequential>
@@ -97,6 +89,8 @@
<make-pom-plugin name="@{name}" version="@{version}" />
<artifact:install file="plugins/@{name}/@{name}.jar">
<artifact:pom refid="plugin-@{name}.pom" />
+ <artifact:attach type="jar" file="plugins/@{name}/@{name}-src.jar" classifier="sources" />
+ <artifact:attach type="jar" file="plugins/@{name}/@{name}-docs.jar" classifier="javadoc" />
<artifact:localRepository path="@{repository}" id="${repository.credentials.id}" />
<extra-attachments />
</artifact:install>
@@ -109,24 +103,14 @@
<attribute name="repository" />
<attribute name="version" />
<sequential>
- <deploy-local name="scala-library" version="@{version}" repository="@{repository}">
- <extra-attachments>
- <artifact:attach type="jar" file="scala-library/scala-library-docs.jar" classifier="javadoc" />
- </extra-attachments>
- </deploy-local>
- <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="scala-library" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-compiler" version="@{version}" repository="@{repository}" />
+ <deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
<deploy-local name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
- <deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
- <!-- scala swing api is included in main library api
- <extra-attachments>
- <artifact:attach type="jar" file="scala-swing/scala-swing-docs.jar" classifier="javadoc" />
- </extra-attachments>
- </deploy-local>
- -->
+ <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
</target>
@@ -145,13 +129,13 @@
<artifact:pom refid="@{name}.pom" />
<artifact:remoteRepository url="@{repository}" id="${repository.credentials.id}" />
<artifact:attach type="jar" file="@{name}/@{name}-src.jar" classifier="sources" />
+ <artifact:attach type="jar" file="@{name}/@{name}-docs.jar" classifier="javadoc" />
<extra-attachments />
</artifact:deploy>
</sequential>
</macrodef>
-
- <!-- Deploy compiler plugins -->
+ <!-- Deploy compiler plugins -->
<macrodef name="deploy-remote-plugin">
<attribute name="name" />
<attribute name="version" />
@@ -161,6 +145,8 @@
<make-pom-plugin name="@{name}" version="@{version}" />
<artifact:deploy file="plugins/@{name}/@{name}.jar" settingsFile="${settings.file}">
<artifact:pom refid="plugin-@{name}.pom" />
+ <artifact:attach type="jar" file="plugins/@{name}/@{name}-src.jar" classifier="sources" />
+ <artifact:attach type="jar" file="plugins/@{name}/@{name}-docs.jar" classifier="javadoc" />
<artifact:remoteRepository url="@{repository}" id="${repository.credentials.id}" />
<extra-attachments />
</artifact:deploy>
@@ -184,12 +170,72 @@
<deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
<deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
- <!-- scala swing api is included in main library api
- <extra-attachments>
- <artifact:attach type="jar" file="scala-swing/scala-swing-docs.jar" classifier="javadoc" />
- </extra-attachments>
- </deploy-remote>
- -->
+ </sequential>
+ </macrodef>
+
+ <!-- PGP Signed deployment -->
+ <macrodef name="deploy-remote-signed-single">
+ <attribute name="pom" />
+ <attribute name="repository" />
+ <attribute name="jar" />
+ <attribute name="srcjar" />
+ <attribute name="docjar" />
+ <sequential>
+ <artifact:mvn>
+ <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+ <arg value="-Durl=@{repository}" />
+ <arg value="-DrepositoryId=${repository.credentials.id}" />
+ <arg value="-DpomFile=@{pom}" />
+ <arg value="-Dfile=@{jar}" />
+ <arg value="-Dsources=@{srcjar}" />
+ <arg value="-Djavadoc=@{docjar}" />
+ <arg value="-Pgpg" />
+ <arg value="-Dgpg.useagent=true" />
+ </artifact:mvn>
+ </sequential>
+ </macrodef>
+ <macrodef name="deploy-remote-signed">
+ <attribute name="name" />
+ <attribute name="repository" />
+ <attribute name="version" />
+ <element name="extra-attachments" optional="yes" />
+ <sequential>
+ <make-pom name="@{name}" version="@{version}" />
+ <deploy-remote-signed-single
+ pom="@{name}/@{name}-pom-fixed.xml"
+ repository="@{repository}"
+ jar="@{name}/@{name}.jar"
+ srcjar="@{name}/@{name}-src.jar"
+ docjar="@{name}/@{name}-docs.jar" />
+ </sequential>
+ </macrodef>
+ <macrodef name="deploy-remote-plugin-signed">
+ <attribute name="name" />
+ <attribute name="repository" />
+ <attribute name="version" />
+ <element name="extra-attachments" optional="yes" />
+ <sequential>
+ <make-pom-plugin name="@{name}" version="@{version}" />
+ <deploy-remote-signed-single
+ pom="plugins/@{name}/@{name}-pom-fixed.xml"
+ repository="@{repository}"
+ jar="plugins/@{name}/@{name}.jar"
+ srcjar="plugins/@{name}/@{name}-src.jar"
+ docjar="plugins/@{name}/@{name}-docs.jar" />
+ </sequential>
+ </macrodef>
+ <macrodef name="deploy-remote-signed-all">
+ <attribute name="repository" />
+ <attribute name="version" />
+ <sequential>
+ <deploy-remote-plugin-signed name="continuations" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-compiler" version="@{version}" repository="@{repository}" />
+ <deploy-remote-signed name="scala-dbc" version="@{version}" repository="@{repository}" />
+ <deploy-remote-signed name="scala-swing" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
</target>
@@ -204,6 +250,14 @@
</target>
<!-- Remote Targets -->
+ <target name="deploy.signed.snapshot" depends="deploy.remote.init" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
+ <deploy-remote-signed-all version="${maven.snapshot.version.number}" repository="${remote.snapshot.repository}" />
+ </target>
+
+ <target name="deploy.signed.release" depends="deploy.remote.init" description="Deploys the bundled files as a release into the desired remote Maven repository">
+ <deploy-remote-signed-all version="${version.number}" repository="${remote.release.repository}" />
+ </target>
+
<target name="deploy.snapshot" depends="deploy.remote.init" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
<deploy-remote-all version="${maven.snapshot.version.number}" repository="${remote.snapshot.repository}" />
</target>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index 520c5fd5fd..f9bcb6719d 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -6,6 +6,8 @@
<artifactId>scala-compiler</artifactId>
<packaging>jar</packaging>
<version>@VERSION@</version>
+ <name>Scala Compiler</name>
+ <description>Compiler for the Scala Programming Language</description>
<url>http://www.scala-lang.org/</url>
<inceptionYear>2002</inceptionYear>
<organization>
@@ -54,4 +56,14 @@
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
</distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/maven/scala-dbc-pom.xml b/src/build/maven/scala-dbc-pom.xml
index 6c1fa4529c..23092d10ad 100644
--- a/src/build/maven/scala-dbc-pom.xml
+++ b/src/build/maven/scala-dbc-pom.xml
@@ -6,6 +6,8 @@
<artifactId>scala-dbc</artifactId>
<packaging>jar</packaging>
<version>@VERSION@</version>
+ <name>Scala Database Connectivity</name>
+ <description>Connectivity for your DBs</description>
<url>http://www.scala-lang.org/</url>
<inceptionYear>2002</inceptionYear>
<organization>
@@ -47,4 +49,14 @@
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
</distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index 836ff4766a..8e0abd4937 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -6,6 +6,8 @@
<artifactId>scala-library</artifactId>
<packaging>jar</packaging>
<version>@VERSION@</version>
+ <name>Scala Library</name>
+ <description>Standard library for the Scala Programming Language</description>
<url>http://www.scala-lang.org/</url>
<inceptionYear>2002</inceptionYear>
<organization>
@@ -40,4 +42,14 @@
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
</distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/maven/scala-partest-pom.xml b/src/build/maven/scala-partest-pom.xml
index 3dc330672a..f18ca46c50 100644
--- a/src/build/maven/scala-partest-pom.xml
+++ b/src/build/maven/scala-partest-pom.xml
@@ -6,6 +6,8 @@
<artifactId>scala-partest</artifactId>
<packaging>jar</packaging>
<version>@VERSION@</version>
+ <name>Parallel Test Framework</name>
+ <description>Testing framework for the Scala compiler</description>
<url>http://www.scala-lang.org/</url>
<inceptionYear>2002</inceptionYear>
<organization>
@@ -48,4 +50,14 @@
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
</distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/maven/scala-swing-pom.xml b/src/build/maven/scala-swing-pom.xml
index 06b799c7b7..a03bc07ab0 100644
--- a/src/build/maven/scala-swing-pom.xml
+++ b/src/build/maven/scala-swing-pom.xml
@@ -6,6 +6,8 @@
<artifactId>scala-swing</artifactId>
<packaging>jar</packaging>
<version>@VERSION@</version>
+ <name>Scala Swing library</name>
+ <description>Swing for Scala</description>
<url>http://www.scala-lang.org/</url>
<inceptionYear>2002</inceptionYear>
<organization>
@@ -47,4 +49,14 @@
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
</distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/maven/scalap-pom.xml b/src/build/maven/scalap-pom.xml
index 3326e2d350..d7f867d4a1 100644
--- a/src/build/maven/scalap-pom.xml
+++ b/src/build/maven/scalap-pom.xml
@@ -6,6 +6,8 @@
<artifactId>scalap</artifactId>
<packaging>jar</packaging>
<version>@VERSION@</version>
+ <name>Scalap</name>
+ <description>Bytecode analysis tool</description>
<url>http://www.scala-lang.org/</url>
<inceptionYear>2002</inceptionYear>
<organization>
@@ -48,4 +50,14 @@
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
</distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 05e49e5e81..90aec8e25b 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -11,6 +11,9 @@ PROPERTIES
============================================================================ -->
<property name="sbaz.universe" value="http://www.scala-lang.org/downloads/packages"/>
+ <property file="${basedir}/build.number.maven"/>
+ <!-- The Maven packaging requires the version.major, version.minor and version.patch
+ properties; the "get-scala-revision" script only provides "version.number". -->
<!-- ===========================================================================
MAIN DISTRIBUTION PACKAGING
@@ -248,23 +251,50 @@ MAIN DISTRIBUTION SBAZ
</target>
<target name="pack-maven.srcs" depends="pack-maven.libs">
+ <!-- Add missing src jars. -->
<jar destfile="${dists.dir}/maven/${version.number}/jline/jline-src.jar"
basedir="${src.dir}/jline/src/main/java">
<include name="**/*"/>
</jar>
+
+
+ <!-- Continuations plugin -->
+ <jar destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
+ basedir="${src.dir}/continuations/plugin">
+ <include name="**/*"/>
+ </jar>
</target>
<target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
+ <jar destfile="${dists.dir}/maven/${version.number}/jline/jline-docs.jar"
+ basedir="${build-docs.dir}/jline">
+ <include name="**/*"/>
+ </jar>
<jar destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
basedir="${build-docs.dir}/library">
<include name="**/*"/>
</jar>
- <!-- scala-swing api is included in main library api
- <jar destfile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
- basedir="${build-docs.dir}/swing">
+ <jar destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
+ basedir="${build-docs.dir}/compiler">
+ <include name="**/*"/>
+ </jar>
+ <jar destfile="${dists.dir}/maven/${version.number}/scalap/scalap-docs.jar"
+ basedir="${build-docs.dir}/scalap">
+ <include name="**/*"/>
+ </jar>
+ <jar destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
+ basedir="${build-docs.dir}/scala-partest">
+ <include name="**/*"/>
+ </jar>
+ <jar destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
+ basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
</jar>
- -->
+ <!-- TODO - Scala swing and dbc should maybe have their own jars, but creating them is SLOW. -->
+ <copy tofile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
+ file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
+ <copy tofile="${dists.dir}/maven/${version.number}/scala-dbc/scala-dbc-docs.jar"
+ file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
</target>
<target name="pack-maven.latest.unix" depends="pack-maven.docs" unless="os.win">
diff --git a/src/compiler/rootdoc.txt b/src/compiler/rootdoc.txt
new file mode 100644
index 0000000000..173f604098
--- /dev/null
+++ b/src/compiler/rootdoc.txt
@@ -0,0 +1,6 @@
+The Scala compiler API.
+
+The following resources are useful for Scala plugin/compiler development:
+ - [[http://www.scala-lang.org/node/215 Scala development tutorials]] on [[http://www.scala-lang.org www.scala-lang.org]]
+ - [[https://wiki.scala-lang.org/display/SIW/ Scala Internals wiki]]
+ - [[http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ Scala compiler corner]], maintained by Miguel
diff --git a/src/compiler/scala/reflect/internal/AnnotationInfos.scala b/src/compiler/scala/reflect/internal/AnnotationInfos.scala
index c3dde3e6d1..9a7c79d856 100644
--- a/src/compiler/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/compiler/scala/reflect/internal/AnnotationInfos.scala
@@ -116,6 +116,11 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
// Classfile annot: args empty. Scala annot: assocs empty.
assert(args.isEmpty || assocs.isEmpty, atp)
+ // @xeno.by: necessary for reification, see Reifiers.scala for more info
+ private var orig: Tree = EmptyTree
+ def original = orig
+ def setOriginal(t: Tree): this.type = { orig = t; this }
+
override def toString = (
atp +
(if (!args.isEmpty) args.mkString("(", ", ", ")") else "") +
@@ -130,7 +135,7 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
private var forced = false
private lazy val forcedInfo =
try {
- val result = lazyInfo
+ val result = lazyInfo
if (result.pos == NoPosition) result setPos pos
result
} finally forced = true
@@ -138,10 +143,12 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
def atp: Type = forcedInfo.atp
def args: List[Tree] = forcedInfo.args
def assocs: List[(Name, ClassfileAnnotArg)] = forcedInfo.assocs
+ def original: Tree = forcedInfo.original
+ def setOriginal(t: Tree): this.type = { forcedInfo.setOriginal(t); this }
// We should always be able to print things without forcing them.
override def toString = if (forced) forcedInfo.toString else "@<?>"
-
+
override def pos: Position = if (forced) forcedInfo.pos else NoPosition
}
@@ -166,10 +173,16 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
def args: List[Tree]
def assocs: List[(Name, ClassfileAnnotArg)]
+ // @xeno.by: necessary for reification, see Reifiers.scala for more info
+ def original: Tree
+ def setOriginal(t: Tree): this.type
+
/** Hand rolling Product. */
def _1 = atp
def _2 = args
def _3 = assocs
+ // @xeno.by: original hasn't become a product member for backward compatibility purposes
+ // def _4 = original
def canEqual(other: Any) = other.isInstanceOf[AnnotationInfo]
override def productPrefix = "AnnotationInfo"
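
The setOriginal setter above returns this.type, so a freshly built AnnotationInfo can be decorated in a single expression (importAnnotationInfo in Importers.scala below relies on exactly that). A tiny standalone sketch of the this.type convention, with illustrative names:

    object SetterChainDemo {
      class Info {
        private var orig: String = ""
        def original = orig
        // this.type keeps the receiver's static type, so a chained
        // call still returns an Info (or a subclass), not Any.
        def setOriginal(t: String): this.type = { orig = t; this }
      }

      def main(args: Array[String]): Unit = {
        val info = new Info().setOriginal("Foo(1, 2)")
        println(info.original) // Foo(1, 2)
      }
    }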
diff --git a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
index 9e5c93753f..3753a45133 100644
--- a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
@@ -29,12 +29,12 @@ trait BaseTypeSeqs {
this: SymbolTable =>
import definitions._
- protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
+ protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
new BaseTypeSeq(parents, elems)
/** Note: constructor is protected to force everyone to use the factory method newBaseTypeSeq instead.
- * This is necessary because when run from reflection every base type sequence needs to have a
- * SynchronizedBaseTypeSeq as mixin.
+ * This is necessary because when run from reflection every base type sequence needs to have a
+ * SynchronizedBaseTypeSeq as mixin.
*/
class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
self =>
@@ -242,7 +242,7 @@ trait BaseTypeSeqs {
// Console.println("computed baseTypeSeq of " + tsym.tpe + " " + parents + ": "+elems.toString)//DEBUG
newBaseTypeSeq(parents, elems)
}
-
+
class MappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) extends BaseTypeSeq(orig.parents map f, orig.elems) {
override def apply(i: Int) = f(orig.apply(i))
override def rawElem(i: Int) = f(orig.rawElem(i))
@@ -254,7 +254,7 @@ trait BaseTypeSeqs {
override def exists(p: Type => Boolean) = elems exists (x => p(f(x)))
override protected def maxDepthOfElems: Int = elems map (x => maxDpth(f(x))) max
override def toString = elems.mkString("MBTS(", ",", ")")
- }
-
+ }
+
val CyclicInheritance = new Throwable
}
diff --git a/src/compiler/scala/reflect/internal/ClassfileConstants.scala b/src/compiler/scala/reflect/internal/ClassfileConstants.scala
index f1bf41ede9..eec72d082d 100644
--- a/src/compiler/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/compiler/scala/reflect/internal/ClassfileConstants.scala
@@ -88,6 +88,7 @@ object ClassfileConstants {
final val ARRAY_TAG = '['
final val VOID_TAG = 'V'
final val TVAR_TAG = 'T'
+ final val OBJECT_TAG = 'L'
final val ANNOTATION_TAG = '@'
final val SCALA_NOTHING = "scala.runtime.Nothing$"
final val SCALA_NULL = "scala.runtime.Null$"
@@ -359,7 +360,7 @@ object ClassfileConstants {
res |= translateFlag(jflags & JAVA_ACC_INTERFACE)
res
}
-
+
def classFlags(jflags: Int): Long = {
initFields(jflags)
isClass = true
@@ -375,11 +376,11 @@ object ClassfileConstants {
}
}
object FlagTranslation extends FlagTranslation { }
-
+
def toScalaMethodFlags(flags: Int): Long = FlagTranslation methodFlags flags
def toScalaClassFlags(flags: Int): Long = FlagTranslation classFlags flags
def toScalaFieldFlags(flags: Int): Long = FlagTranslation fieldFlags flags
-
+
@deprecated("Use another method in this object", "2.10.0")
def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = (
if (isClass) toScalaClassFlags(flags)
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index 2ca9c8bfd0..9114eb4b67 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -72,8 +72,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
tpnme.Float -> FLOAT_TAG,
tpnme.Double -> DOUBLE_TAG,
tpnme.Boolean -> BOOL_TAG,
- tpnme.Unit -> VOID_TAG,
- tpnme.Object -> TVAR_TAG
+ tpnme.Unit -> VOID_TAG
)
private def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f)
@@ -82,7 +81,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
private def boxedName(name: Name) = sn.Boxed(name.toTypeName)
- lazy val abbrvTag = symbolsMap(ObjectClass :: ScalaValueClasses, nameToTag)
+ lazy val abbrvTag = symbolsMap(ScalaValueClasses, nameToTag) withDefaultValue OBJECT_TAG
lazy val numericWeight = symbolsMapFilt(ScalaValueClasses, nameToWeight.keySet, nameToWeight)
lazy val boxedModule = classesMap(x => getModule(boxedName(x)))
lazy val boxedClass = classesMap(x => getClass(boxedName(x)))
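
The abbrvTag hunk above drops the explicit Object entry (formerly mapped, oddly, to TVAR_TAG) in favour of a map default: any symbol that is not a value class now falls through to the new OBJECT_TAG ('L'). A minimal sketch of the withDefaultValue idiom, with illustrative keys:

    object TagDemo {
      // JVM descriptor tags for some primitives; everything else is a
      // reference type and picks up the 'L' (object) tag by default.
      val abbrvTag = Map(
        "Int"     -> 'I',
        "Boolean" -> 'Z',
        "Unit"    -> 'V'
      ) withDefaultValue 'L'

      def main(args: Array[String]): Unit = {
        println(abbrvTag("Int"))    // I
        println(abbrvTag("String")) // L: not in the map, the default applies
      }
    }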
@@ -129,6 +128,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
FloatClass,
DoubleClass
)
+ def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol)
}
object definitions extends AbsDefinitions with ValueClassDefinitions {
@@ -222,8 +222,12 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.typeConstructor)
lazy val ObjectClass = getClass(sn.Object)
- lazy val AnyCompanionClass = getRequiredClass("scala.AnyCompanion") initFlags (SEALED | ABSTRACT | TRAIT)
- lazy val AnyValCompanionClass = getRequiredClass("scala.AnyValCompanion") initFlags (SEALED | ABSTRACT | TRAIT)
+
+ // Note: this is not the type alias AnyRef, it's a companion-like
+ // object used by the @specialize annotation.
+ lazy val AnyRefModule = getMember(ScalaPackageClass, nme.AnyRef)
+ @deprecated("Use AnyRefModule", "2.10.0")
+ def Predef_AnyRef = AnyRefModule
lazy val AnyValClass = ScalaPackageClass.info member tpnme.AnyVal orElse {
val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, List(AnyClass.tpe, NotNullClass.tpe), 0L)
@@ -285,13 +289,12 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val PredefModule: Symbol = getRequiredModule("scala.Predef")
lazy val PredefModuleClass = PredefModule.moduleClass
- // Note: this is not the type alias AnyRef, it's a val defined in Predef
- // used by the @specialize annotation.
- def Predef_AnyRef = getMember(PredefModule, nme.AnyRef)
+
def Predef_classOf = getMember(PredefModule, nme.classOf)
def Predef_identity = getMember(PredefModule, nme.identity)
def Predef_conforms = getMember(PredefModule, nme.conforms)
def Predef_wrapRefArray = getMember(PredefModule, nme.wrapRefArray)
+ def Predef_??? = getMember(PredefModule, nme.???)
/** Is `sym` a member of Predef with the given name?
* Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
@@ -302,6 +305,11 @@ trait Definitions extends reflect.api.StandardDefinitions {
(sym.name == name) && (sym.owner == PredefModule.moduleClass)
)
+ /** Specialization.
+ */
+ lazy val SpecializableModule = getRequiredModule("scala.Specializable")
+ lazy val GroupOfSpecializable = SpecializableModule.info.member(newTypeName("Group"))
+
lazy val ConsoleModule: Symbol = getRequiredModule("scala.Console")
lazy val ScalaRunTimeModule: Symbol = getRequiredModule("scala.runtime.ScalaRunTime")
lazy val SymbolModule: Symbol = getRequiredModule("scala.Symbol")
@@ -422,9 +430,6 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val FullManifestModule = getRequiredModule("scala.reflect.Manifest")
lazy val OptManifestClass = getRequiredClass("scala.reflect.OptManifest")
lazy val NoManifest = getRequiredModule("scala.reflect.NoManifest")
- lazy val CodeClass = getClass(sn.Code)
- lazy val CodeModule = getModule(sn.Code)
- lazy val Code_lift = getMember(CodeModule, nme.lift_)
lazy val ScalaSignatureAnnotation = getRequiredClass("scala.reflect.ScalaSignature")
lazy val ScalaLongSignatureAnnotation = getRequiredClass("scala.reflect.ScalaLongSignature")
@@ -609,14 +614,6 @@ trait Definitions extends reflect.api.StandardDefinitions {
case _ => NoType
}
- /** To avoid unchecked warnings on polymorphic classes, translate
- * a Foo[T] into a Foo[_] for use in the pattern matcher.
- */
- def typeCaseType(clazz: Symbol) = clazz.tpe.normalize match {
- case TypeRef(_, sym, args) if args.nonEmpty => newExistentialType(sym.typeParams, clazz.tpe)
- case tp => tp
- }
-
def seqType(arg: Type) = appliedType(SeqClass.typeConstructor, List(arg))
def arrayType(arg: Type) = appliedType(ArrayClass.typeConstructor, List(arg))
def byNameType(arg: Type) = appliedType(ByNameParamClass.typeConstructor, List(arg))
@@ -629,6 +626,29 @@ trait Definitions extends reflect.api.StandardDefinitions {
if (phase.erasedTypes || forMSIL) ClassClass.tpe
else appliedType(ClassClass.typeConstructor, List(arg))
+ def vmClassType(arg: Type): Type = ClassType(arg)
+ def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!!
+
+ /** Given a class symbol C with type parameters T1, T2, ... Tn
+ * which have lower/upper bounds LB1/UB1, LB2/UB2, ..., LBn/UBn,
+ * returns an existential type of the form
+ *
+ * C[E1, ..., En] forSome { E1 >: LB1 <: UB1, ..., En >: LBn <: UBn }.
+ */
+ def classExistentialType(clazz: Symbol): Type =
+ newExistentialType(clazz.typeParams, clazz.tpe)
+
+ /** Given type U, creates a Type representing Class[_ <: U].
+ */
+ def boundedClassType(upperBound: Type) =
+ appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound))
+
+ /** To avoid unchecked warnings on polymorphic classes, translate
+ * a Foo[T] into a Foo[_] for use in the pattern matcher.
+ */
+ @deprecated("Use classExistentialType", "2.10.0")
+ def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz)
+
//
// .NET backend
//
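
classExistentialType (and its deprecated alias typeCaseType) exists so the pattern matcher can test against Foo[_] rather than an unchecked Foo[T]. A hedged source-level illustration of why: type arguments are erased, so only the existential form is honestly checkable at runtime. Box and the demo are illustrative:

    class Box[T](val value: T)

    object ExistentialDemo {
      def describe(x: Any): String = x match {
        // Box[_] is shorthand for the existential Box[t] forSome { type t },
        // the shape classExistentialType produces for Box; a pattern like
        // Box[Int] would draw an unchecked warning since T is erased.
        case _: Box[_] => "some Box"
        case _         => "not a Box"
      }
      def main(args: Array[String]): Unit = {
        println(describe(new Box(42))) // some Box
        println(describe("hello"))     // not a Box
      }
    }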
@@ -881,6 +901,9 @@ trait Definitions extends reflect.api.StandardDefinitions {
try getModule(fullname.toTermName)
catch { case _: MissingRequirementError => NoSymbol }
+ def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name))
+ def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name))
+
def getMember(owner: Symbol, name: Name): Symbol = {
if (owner == NoSymbol) NoSymbol
else owner.info.nonPrivateMember(name) match {
@@ -964,8 +987,9 @@ trait Definitions extends reflect.api.StandardDefinitions {
def isPrimitiveValueClass(sym: Symbol) = scalaValueClassesSet(sym)
/** Is symbol a value class? */
- def isValueClass(sym: Symbol) = scalaValueClassesSet(sym)
- def isNonUnitValueClass(sym: Symbol) = (sym != UnitClass) && isValueClass(sym)
+ def isValueClass(sym: Symbol) = scalaValueClassesSet(sym)
+ def isNonUnitValueClass(sym: Symbol) = isValueClass(sym) && (sym != UnitClass)
+ def isSpecializableClass(sym: Symbol) = isValueClass(sym) || (sym == AnyRefClass)
def isScalaValueType(tp: Type) = scalaValueClassesSet(tp.typeSymbol)
/** Is symbol a boxed value class, e.g. java.lang.Integer? */
diff --git a/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala
new file mode 100644
index 0000000000..f1fe4fc118
--- /dev/null
+++ b/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -0,0 +1,50 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.collection.{ mutable, immutable }
+import util._
+
+/** The name of this trait defines the eventual intent better than
+ * it does the initial contents.
+ */
+trait ExistentialsAndSkolems {
+ self: SymbolTable =>
+
+ /** Map a list of type parameter symbols to skolemized symbols, which
+ * can be deskolemized to the original type parameter. (A skolem is a
+ * representation of a bound variable when viewed inside its scope.)
+ * !!!Adriaan: this does not work for hk types.
+ */
+ def deriveFreshSkolems(tparams: List[Symbol]): List[Symbol] = {
+ class Deskolemizer extends LazyType {
+ override val typeParams = tparams
+ val typeSkolems = typeParams map (_.newTypeSkolem setInfo this)
+ override def complete(sym: Symbol) {
+ // The info of a skolem is the skolemized info of the
+ // actual type parameter of the skolem
+ sym setInfo sym.deSkolemize.info.substSym(typeParams, typeSkolems)
+ }
+ }
+ (new Deskolemizer).typeSkolems
+ }
+
+ /** Convert to corresponding type parameters all skolems of method
+ * parameters which appear in `tparams`.
+ */
+ def deskolemizeTypeParams(tparams: List[Symbol])(tp: Type): Type = {
+ class DeSkolemizeMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) if sym.isTypeSkolem && (tparams contains sym.deSkolemize) =>
+ mapOver(typeRef(NoPrefix, sym.deSkolemize, args))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ new DeSkolemizeMap mapOver tp
+ }
+}
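
For readers new to the terminology in the file above: a skolem is the fresh, fixed stand-in the typechecker invents for a bound type variable while working inside its scope. A hedged user-level illustration (the compiler-internal deriveFreshSkolems operates on symbols, not on code like this):

    object SkolemDemo {
      // Inside process, the unknown element type of xs is fixed to a
      // fresh skolem for the duration of the body. The polymorphic
      // helper gives that skolem a name, A, so it can be used uniformly.
      def process(xs: List[_]): Int = {
        def helper[A](ys: List[A]): Int = ys.map(y => y :: Nil).size
        helper(xs) // A is instantiated to the skolem of xs's element type
      }

      def main(args: Array[String]): Unit =
        println(process(List("a", "b", "c"))) // 3
    }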
diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala
index 66af92be5f..270491d078 100644
--- a/src/compiler/scala/reflect/internal/Flags.scala
+++ b/src/compiler/scala/reflect/internal/Flags.scala
@@ -165,6 +165,7 @@ class Flags extends ModifierFlags {
final val TRIEDCOOKING = 0x100000000000L // ``Cooking'' has been tried on this symbol
// A Java method's type is ``cooked'' by transforming raw types to existentials
+ final val SYNCHRONIZED = 0x200000000000L // symbol is a method which should be marked ACC_SYNCHRONIZED
// ------- shift definitions -------------------------------------------------------
final val InitialFlags = 0x0001FFFFFFFFFFFFL // flags that are enabled from phase 1.
@@ -222,7 +223,7 @@ class Flags extends ModifierFlags {
/** These modifiers appear in TreePrinter output. */
final val PrintableFlags: Long =
ExplicitFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR | MACRO |
- ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | BRIDGE | STATIC | VBRIDGE | SPECIALIZED
+ ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | BRIDGE | STATIC | VBRIDGE | SPECIALIZED | SYNCHRONIZED
/** The two bridge flags */
final val BridgeFlags = BRIDGE | VBRIDGE
@@ -384,7 +385,7 @@ class Flags extends ModifierFlags {
case VBRIDGE => "<vbridge>" // (1L << 42)
case VARARGS => "<varargs>" // (1L << 43)
case TRIEDCOOKING => "<triedcooking>" // (1L << 44)
- case 0x200000000000L => "" // (1L << 45)
+ case SYNCHRONIZED => "<synchronized>" // (1L << 45)
case 0x400000000000L => "" // (1L << 46)
case 0x800000000000L => "" // (1L << 47)
case 0x1000000000000L => "" // (1L << 48)
@@ -466,7 +467,7 @@ class Flags extends ModifierFlags {
}
protected final val rawFlagPickledOrder: Array[Long] = pickledListOrder.toArray
- def flagOfModifier(mod: Modifier.Value): Long = mod match {
+ def flagOfModifier(mod: Modifier): Long = mod match {
case Modifier.`protected` => PROTECTED
case Modifier.`private` => PRIVATE
case Modifier.`override` => OVERRIDE
@@ -496,13 +497,13 @@ class Flags extends ModifierFlags {
case Modifier.bynameParameter => BYNAMEPARAM
}
- def flagsOfModifiers(mods: List[Modifier.Value]): Long =
+ def flagsOfModifiers(mods: List[Modifier]): Long =
(mods :\ 0L) { (mod, curr) => curr | flagOfModifier(mod) }
- def modifierOfFlag(flag: Long): Option[Modifier.Value] =
+ def modifierOfFlag(flag: Long): Option[Modifier] =
Modifier.values find { mod => flagOfModifier(mod) == flag }
- def modifiersOfFlags(flags: Long): List[Modifier.Value] =
+ def modifiersOfFlags(flags: Long): List[Modifier] =
pickledListOrder map (mask => modifierOfFlag(flags & mask)) flatMap { mod => mod }
}
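
The SYNCHRONIZED addition follows the usual Flags discipline: one bit of a Long per property, combined with | and tested against masks, plus entries in the printable and pickled orders. A minimal sketch of that bit-mask representation; the values are demo stand-ins:

    object FlagDemo {
      // Flag bits in the style of Flags.scala (demo values).
      final val FINAL        = 1L << 5
      final val SYNCHRONIZED = 1L << 45 // the bit the patch assigns

      def hasFlag(flags: Long, mask: Long): Boolean = (flags & mask) != 0L

      def main(args: Array[String]): Unit = {
        val flags = FINAL | SYNCHRONIZED
        println(hasFlag(flags, SYNCHRONIZED)) // true
        println(hasFlag(flags, 1L << 44))     // false: that bit was never set
      }
    }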
diff --git a/src/compiler/scala/reflect/internal/HasFlags.scala b/src/compiler/scala/reflect/internal/HasFlags.scala
index ec4e919bdc..8affd66cd5 100644
--- a/src/compiler/scala/reflect/internal/HasFlags.scala
+++ b/src/compiler/scala/reflect/internal/HasFlags.scala
@@ -136,7 +136,7 @@ trait HasFlags {
/** Whether this entity has NONE of the flags in the given mask.
*/
def hasNoFlags(mask: Long): Boolean = !hasFlag(mask)
-
+
protected def isSetting(f: Long, mask: Long) = !hasFlag(f) && ((mask & f) != 0L)
protected def isClearing(f: Long, mask: Long) = hasFlag(f) && ((mask & f) != 0L)
diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala
index b36191b025..1003fa804f 100644
--- a/src/compiler/scala/reflect/internal/Importers.scala
+++ b/src/compiler/scala/reflect/internal/Importers.scala
@@ -9,16 +9,34 @@ trait Importers { self: SymbolTable =>
val from: SymbolTable
lazy val symMap: WeakHashMap[from.Symbol, Symbol] = new WeakHashMap
+ lazy val tpeMap: WeakHashMap[from.Type, Type] = new WeakHashMap
+
+ // fixups and maps prevent stackoverflows in importer
+ var pendingSyms = 0
+ var pendingTpes = 0
+ lazy val fixups = collection.mutable.MutableList[Function0[Unit]]()
+ def addFixup(fixup: => Unit): Unit = fixups += (() => fixup)
+ def tryFixup(): Unit = {
+ if (pendingSyms == 0 && pendingTpes == 0) {
+ val fixups = this.fixups.toList
+ this.fixups.clear()
+ fixups foreach { _() }
+ }
+ }
object reverse extends from.Importer {
val from: self.type = self
for ((fromsym, mysym) <- Importer.this.symMap) symMap += ((mysym, fromsym))
+ for ((fromtpe, mytpe) <- Importer.this.tpeMap) tpeMap += ((mytpe, fromtpe))
}
def importPosition(pos: from.Position): Position = NoPosition
- def importSymbol(sym: from.Symbol): Symbol = {
+ def importSymbol(sym0: from.Symbol): Symbol = {
def doImport(sym: from.Symbol): Symbol = {
+ if (symMap.contains(sym))
+ return symMap(sym)
+
val myowner = importSymbol(sym.owner)
val mypos = importPosition(sym.pos)
val myname = importName(sym.name).toTermName
@@ -32,7 +50,7 @@ trait Importers { self: SymbolTable =>
case x: from.MethodSymbol =>
linkReferenced(myowner.newMethod(myname, mypos, myflags), x, importSymbol)
case x: from.ModuleSymbol =>
- linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, doImport)
+ linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
case x: from.FreeVar =>
newFreeVar(importName(x.name).toTermName, importType(x.tpe), x.value, myflags)
case x: from.TermSymbol =>
@@ -44,14 +62,14 @@ trait Importers { self: SymbolTable =>
case y: from.Symbol => importSymbol(y)
}
myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
- /*
- case x: from.ModuleClassSymbol =>
- val mysym = new ModuleClassSymbol(myowner, mypos, myname.toTypeName)
- mysym.sourceModule = importSymbol(x.sourceModule)
- mysym
-*/
+ case x: from.ModuleClassSymbol =>
+ val mysym = myowner.newModuleClassSymbol(myname.toTypeName, mypos, myflags)
+ symMap(x) = mysym
+ mysym.sourceModule = importSymbol(x.sourceModule)
+ mysym
case x: from.ClassSymbol =>
val mysym = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
+ symMap(x) = mysym
if (sym.thisSym != sym) {
mysym.typeOfThis = importType(sym.typeOfThis)
mysym.thisSym.name = importName(sym.thisSym.name)
@@ -63,7 +81,7 @@ trait Importers { self: SymbolTable =>
symMap(sym) = mysym
mysym setFlag Flags.LOCKED
mysym setInfo {
- val mytypeParams = sym.typeParams map doImport
+ val mytypeParams = sym.typeParams map importSymbol
new LazyPolyType(mytypeParams) {
override def complete(s: Symbol) {
val result = sym.info match {
@@ -78,7 +96,8 @@ trait Importers { self: SymbolTable =>
mysym resetFlag Flags.LOCKED
} // end doImport
- def importOrRelink: Symbol =
+ def importOrRelink: Symbol = {
+ val sym = sym0 // makes sym visible in the debugger
if (sym == null)
null
else if (sym == from.NoSymbol)
@@ -86,112 +105,156 @@ trait Importers { self: SymbolTable =>
else if (sym.isRoot)
definitions.RootClass
else {
- val myowner = importSymbol(sym.owner)
- val myname = importName(sym.name)
- if (sym.isModuleClass) {
- assert(sym.sourceModule != NoSymbol, sym)
- val mymodule = importSymbol(sym.sourceModule)
- assert(mymodule != NoSymbol, sym)
- assert(mymodule.moduleClass != NoSymbol, mymodule)
- mymodule.moduleClass
- } else if (myowner.isClass && !myowner.isRefinementClass && !(myowner hasFlag Flags.LOCKED) && sym.owner.info.decl(sym.name).exists) {
- // symbol is in class scope, try to find equivalent one in local scope
- if (sym.isOverloaded)
- myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol)
- else {
- var existing: Symbol = myowner.info.decl(myname)
- if (existing.isOverloaded) {
- existing =
- if (sym.isMethod) {
- val localCopy = doImport(sym)
- existing filter (_.tpe matches localCopy.tpe)
- } else {
- existing filter (!_.isMethod)
- }
- assert(!existing.isOverloaded,
- "import failure: cannot determine unique overloaded method alternative from\n "+
- (existing.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe)
+ val name = sym.name
+ val owner = sym.owner
+ var scope = if (owner.isClass && !owner.isRefinementClass) owner.info else from.NoType
+ var existing = scope.decl(name)
+ if (sym.isPackageClass || sym.isModuleClass) existing = existing.moduleClass
+ if (!existing.exists) scope = from.NoType
+
+ val myname = importName(name)
+ val myowner = importSymbol(owner)
+ val myscope = if (scope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType
+ var myexisting = if (myscope != NoType) myowner.info.decl(myname) else NoSymbol // cannot load myexisting in general case, because it creates cycles for methods
+ if (sym.isPackageClass || sym.isModuleClass) myexisting = importSymbol(sym.sourceModule).moduleClass
+ if (!sym.isOverloaded && myexisting.isOverloaded) {
+ myexisting =
+ if (sym.isMethod) {
+ val localCopy = doImport(sym)
+ myexisting filter (_.tpe matches localCopy.tpe)
+ } else {
+ myexisting filter (!_.isMethod)
}
- if (existing != NoSymbol) existing
- else {
+ assert(!myexisting.isOverloaded,
+ "import failure: cannot determine unique overloaded method alternative from\n "+
+ (myexisting.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe)
+ }
+
+ val mysym = {
+ if (sym.isOverloaded) {
+ myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol)
+ } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) {
+ assert(myowner.typeParams.length > sym.paramPos,
+ "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+
+ myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+
+ sym.owner+from.typeParamsString(sym.owner.info))
+ myowner.typeParams(sym.paramPos)
+ } else {
+ if (myexisting != NoSymbol) {
+ myexisting
+ } else {
val mysym = doImport(sym)
- assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+existing)
- myowner.info.decls enter mysym
+
+ if (myscope != NoType) {
+ assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+myexisting)
+ myowner.info.decls enter mysym
+ }
+
mysym
}
}
- } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) {
- assert(myowner.typeParams.length > sym.paramPos,
- "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+
- myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+
- sym.owner+from.typeParamsString(sym.owner.info))
- myowner.typeParams(sym.paramPos)
- } else
- doImport(sym)
+ }
+
+ mysym
}
- symMap getOrElseUpdate (sym, importOrRelink)
+ } // end importOrRelink
+
+ val sym = sym0
+ if (symMap contains sym) {
+ symMap(sym)
+ } else {
+ pendingSyms += 1
+
+ try {
+ symMap getOrElseUpdate (sym, importOrRelink)
+ } finally {
+ pendingSyms -= 1
+ tryFixup()
+ }
+ }
}
- def importType(tpe: from.Type): Type = tpe match {
- case from.TypeRef(pre, sym, args) =>
- TypeRef(importType(pre), importSymbol(sym), args map importType)
- case from.ThisType(clazz) =>
- ThisType(importSymbol(clazz))
- case from.SingleType(pre, sym) =>
- SingleType(importType(pre), importSymbol(sym))
- case from.MethodType(params, restpe) =>
- MethodType(params map importSymbol, importType(restpe))
- case from.PolyType(tparams, restpe) =>
- PolyType(tparams map importSymbol, importType(restpe))
- case from.NullaryMethodType(restpe) =>
- NullaryMethodType(importType(restpe))
- case from.ConstantType(constant @ from.Constant(_)) =>
- ConstantType(importConstant(constant))
- case from.SuperType(thistpe, supertpe) =>
- SuperType(importType(thistpe), importType(supertpe))
- case from.TypeBounds(lo, hi) =>
- TypeBounds(importType(lo), importType(hi))
- case from.BoundedWildcardType(bounds) =>
- BoundedWildcardType(importTypeBounds(bounds))
- case from.ClassInfoType(parents, decls, clazz) =>
- val myclazz = importSymbol(clazz)
- val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
- val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
- myclazz setInfo GenPolyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
- decls foreach importSymbol // will enter itself into myclazz
- myclazzTpe
- case from.RefinedType(parents, decls) =>
- RefinedType(parents map importType, importScope(decls), importSymbol(tpe.typeSymbol))
- case from.ExistentialType(tparams, restpe) =>
- newExistentialType(tparams map importSymbol, importType(restpe))
- case from.OverloadedType(pre, alts) =>
- OverloadedType(importType(pre), alts map importSymbol)
- case from.AntiPolyType(pre, targs) =>
- AntiPolyType(importType(pre), targs map importType)
- case x: from.TypeVar =>
- TypeVar(importType(x.origin), importTypeConstraint(x.constr0), x.typeArgs map importType, x.params map importSymbol)
- case from.NotNullType(tpe) =>
- NotNullType(importType(tpe))
- case from.AnnotatedType(annots, tpe, selfsym) =>
- AnnotatedType(annots map importAnnotationInfo, importType(tpe), importSymbol(selfsym))
- case from.ErrorType =>
- ErrorType
- case from.WildcardType =>
- WildcardType
- case from.NoType =>
- NoType
- case from.NoPrefix =>
- NoPrefix
- case null =>
- null
+ def importType(tpe: from.Type): Type = {
+ def doImport(tpe: from.Type): Type = tpe match {
+ case from.TypeRef(pre, sym, args) =>
+ TypeRef(importType(pre), importSymbol(sym), args map importType)
+ case from.ThisType(clazz) =>
+ ThisType(importSymbol(clazz))
+ case from.SingleType(pre, sym) =>
+ SingleType(importType(pre), importSymbol(sym))
+ case from.MethodType(params, restpe) =>
+ MethodType(params map importSymbol, importType(restpe))
+ case from.PolyType(tparams, restpe) =>
+ PolyType(tparams map importSymbol, importType(restpe))
+ case from.NullaryMethodType(restpe) =>
+ NullaryMethodType(importType(restpe))
+ case from.ConstantType(constant @ from.Constant(_)) =>
+ ConstantType(importConstant(constant))
+ case from.SuperType(thistpe, supertpe) =>
+ SuperType(importType(thistpe), importType(supertpe))
+ case from.TypeBounds(lo, hi) =>
+ TypeBounds(importType(lo), importType(hi))
+ case from.BoundedWildcardType(bounds) =>
+ BoundedWildcardType(importTypeBounds(bounds))
+ case from.ClassInfoType(parents, decls, clazz) =>
+ val myclazz = importSymbol(clazz)
+ val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
+ val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
+ myclazz setInfo GenPolyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
+ decls foreach importSymbol // will enter itself into myclazz
+ myclazzTpe
+ case from.RefinedType(parents, decls) =>
+ RefinedType(parents map importType, importScope(decls), importSymbol(tpe.typeSymbol))
+ case from.ExistentialType(tparams, restpe) =>
+ newExistentialType(tparams map importSymbol, importType(restpe))
+ case from.OverloadedType(pre, alts) =>
+ OverloadedType(importType(pre), alts map importSymbol)
+ case from.AntiPolyType(pre, targs) =>
+ AntiPolyType(importType(pre), targs map importType)
+ case x: from.TypeVar =>
+ TypeVar(importType(x.origin), importTypeConstraint(x.constr0), x.typeArgs map importType, x.params map importSymbol)
+ case from.NotNullType(tpe) =>
+ NotNullType(importType(tpe))
+ case from.AnnotatedType(annots, tpe, selfsym) =>
+ AnnotatedType(annots map importAnnotationInfo, importType(tpe), importSymbol(selfsym))
+ case from.ErrorType =>
+ ErrorType
+ case from.WildcardType =>
+ WildcardType
+ case from.NoType =>
+ NoType
+ case from.NoPrefix =>
+ NoPrefix
+ case null =>
+ null
+ } // end doImport
+
+ def importOrRelink: Type =
+ doImport(tpe)
+
+ if (tpeMap contains tpe) {
+ tpeMap(tpe)
+ } else {
+ pendingTpes += 1
+
+ try {
+ tpeMap getOrElseUpdate (tpe, importOrRelink)
+ } finally {
+ pendingTpes -= 1
+ tryFixup()
+ }
+ }
}
def importTypeBounds(bounds: from.TypeBounds) = importType(bounds).asInstanceOf[TypeBounds]
- def importAnnotationInfo(ann: from.AnnotationInfo): AnnotationInfo =
- AnnotationInfo(importType(ann.atp), ann.args map importTree, ann.assocs map {
- case (name, arg) => (importName(name), importAnnotArg(arg))
- })
+ def importAnnotationInfo(ann: from.AnnotationInfo): AnnotationInfo = {
+ val atp1 = importType(ann.atp)
+ val args1 = ann.args map importTree
+ val assocs1 = ann.assocs map { case (name, arg) => (importName(name), importAnnotArg(arg)) }
+ val original1 = importTree(ann.original)
+ AnnotationInfo(atp1, args1, assocs1) setOriginal original1
+ }
def importAnnotArg(arg: from.ClassfileAnnotArg): ClassfileAnnotArg = arg match {
case from.LiteralAnnotArg(constant @ from.Constant(_)) =>
@@ -223,7 +286,7 @@ trait Importers { self: SymbolTable =>
new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree)
def importImportSelector(sel: from.ImportSelector): ImportSelector =
- new ImportSelector(importName(sel.name), sel.namePos, importName(sel.rename), sel.renamePos)
+ new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos)
def importTree(tree: from.Tree): Tree = {
val mytree = tree match {
@@ -265,6 +328,8 @@ trait Importers { self: SymbolTable =>
new Function(vparams map importValDef, importTree(body))
case from.Assign(lhs, rhs) =>
new Assign(importTree(lhs), importTree(rhs))
+ case from.AssignOrNamedArg(lhs, rhs) =>
+ new AssignOrNamedArg(importTree(lhs), importTree(rhs))
case from.If(cond, thenp, elsep) =>
new If(importTree(cond), importTree(thenp), importTree(elsep))
case from.Match(selector, cases) =>
@@ -326,21 +391,24 @@ trait Importers { self: SymbolTable =>
case null =>
null
}
- if (mytree != null) {
- val mysym = if (tree hasSymbol) importSymbol(tree.symbol) else NoSymbol
- val mytpe = importType(tree.tpe)
-
- mytree match {
- case mytt: TypeTree =>
- val tt = tree.asInstanceOf[from.TypeTree]
- if (mytree hasSymbol) mytt.symbol = mysym
- if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe)
- if (tt.original != null) mytt.setOriginal(importTree(tt.original))
- case _ =>
- if (mytree hasSymbol) mytree.symbol = importSymbol(tree.symbol)
- mytree.tpe = importType(tree.tpe)
+ addFixup({
+ if (mytree != null) {
+ val mysym = if (tree hasSymbol) importSymbol(tree.symbol) else NoSymbol
+ val mytpe = importType(tree.tpe)
+
+ mytree match {
+ case mytt: TypeTree =>
+ val tt = tree.asInstanceOf[from.TypeTree]
+ if (mytree hasSymbol) mytt.symbol = mysym
+ if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe)
+ if (tt.original != null) mytt.setOriginal(importTree(tt.original))
+ case _ =>
+ if (mytree hasSymbol) mytree.symbol = importSymbol(tree.symbol)
+ mytree.tpe = importType(tree.tpe)
+ }
}
- }
+ })
+ tryFixup()
mytree
}
@@ -356,4 +424,4 @@ trait Importers { self: SymbolTable =>
case _ => constant.value
})
}
-}
+}
\ No newline at end of file
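The Importers hunks above establish a re-entrancy protocol: every import is memoized (tpeMap getOrElseUpdate), a pending counter records how deeply the importer has recursed, and side effects on freshly imported trees are queued via addFixup, to be run by tryFixup only once the counter drains back to zero. A minimal stand-alone sketch of that shape, assuming only the standard library; ReentrantImporter, cache and fixups are this sketch's names, not the compiler's:

    import scala.collection.mutable

    class ReentrantImporter[A, B](doImport: A => B) {
      private val cache   = mutable.Map[A, B]()
      private var pending = 0
      private val fixups  = mutable.ListBuffer[() => Unit]()

      def addFixup(f: => Unit): Unit = fixups += (() => f)

      // Run the queued fixups only once no import is in flight.
      private def tryFixup(): Unit =
        if (pending == 0) {
          val fs = fixups.toList
          fixups.clear()
          fs foreach (_.apply())
        }

      def apply(a: A): B =
        cache.getOrElse(a, {
          pending += 1
          try cache.getOrElseUpdate(a, doImport(a))
          finally { pending -= 1; tryFixup() }
        })
    }

As in the patch, a cache hit returns without touching the pending counter, mirroring the `if (tpeMap contains tpe)` fast path above.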
diff --git a/src/compiler/scala/reflect/internal/Kinds.scala b/src/compiler/scala/reflect/internal/Kinds.scala
index e675be43dc..23bff950b8 100644
--- a/src/compiler/scala/reflect/internal/Kinds.scala
+++ b/src/compiler/scala/reflect/internal/Kinds.scala
@@ -128,7 +128,7 @@ trait Kinds {
// @M sometimes hkargs != arg.typeParams, the symbol and the type may
// have very different type parameters
val hkparams = param.typeParams
-
+
def kindCheck(cond: Boolean, f: KindErrors => KindErrors) {
if (!cond)
kindErrors = f(kindErrors)
diff --git a/src/compiler/scala/reflect/internal/NameManglers.scala b/src/compiler/scala/reflect/internal/NameManglers.scala
index 97a74c2383..12f56976c9 100644
--- a/src/compiler/scala/reflect/internal/NameManglers.scala
+++ b/src/compiler/scala/reflect/internal/NameManglers.scala
@@ -22,10 +22,10 @@ trait NameManglers {
val MODULE_SUFFIX_STRING = NameTransformer.MODULE_SUFFIX_STRING
val NAME_JOIN_STRING = NameTransformer.NAME_JOIN_STRING
-
+
val MODULE_SUFFIX_NAME: TermName = newTermName(MODULE_SUFFIX_STRING)
val NAME_JOIN_NAME: TermName = newTermName(NAME_JOIN_STRING)
-
+
def flattenedName(segments: Name*): NameType = compactedString(segments mkString NAME_JOIN_STRING)
/**
@@ -76,12 +76,14 @@ trait NameManglers {
val PROTECTED_PREFIX = "protected$"
val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
val SINGLETON_SUFFIX = ".type"
- val SPECIALIZED_SUFFIX_STRING = "$sp"
val SUPER_PREFIX_STRING = "super$"
val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
-
- val SETTER_SUFFIX: TermName = encode("_=")
- val SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX_STRING
+ val SETTER_SUFFIX: TermName = encode("_=")
+
+ @deprecated("2.10.0", "Use SPECIALIZED_SUFFIX")
+ def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
+ @deprecated("2.10.0", "Use SPECIALIZED_SUFFIX")
+ def SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX.toTermName
def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
@@ -90,6 +92,7 @@ trait NameManglers {
def isLocalName(name: Name) = name endsWith LOCAL_SUFFIX_STRING
def isLoopHeaderLabel(name: Name) = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
def isProtectedAccessorName(name: Name) = name startsWith PROTECTED_PREFIX
+ def isSuperAccessorName(name: Name) = name startsWith SUPER_PREFIX_STRING
def isReplWrapperName(name: Name) = name containsName INTERPRETER_IMPORT_WRAPPER
def isSetterName(name: Name) = name endsWith SETTER_SUFFIX
def isTraitSetterName(name: Name) = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING)
@@ -118,13 +121,13 @@ trait NameManglers {
name.subName(i, name.length)
} else name
}
-
+
def unspecializedName(name: Name): Name = (
- if (name endsWith SPECIALIZED_SUFFIX_NAME)
+ if (name endsWith SPECIALIZED_SUFFIX)
name.subName(0, name.lastIndexOf('m') - 1)
else name
)
-
+
def macroMethodName(name: Name) = {
val base = if (name.isTypeName) nme.TYPEkw else nme.DEFkw
base append nme.MACRO append name
@@ -140,8 +143,8 @@ trait NameManglers {
* and another one belonging to the enclosing class, on Double.
*/
def splitSpecializedName(name: Name): (Name, String, String) =
- if (name endsWith SPECIALIZED_SUFFIX_NAME) {
- val name1 = name dropRight SPECIALIZED_SUFFIX_NAME.length
+ if (name endsWith SPECIALIZED_SUFFIX) {
+ val name1 = name dropRight SPECIALIZED_SUFFIX.length
val idxC = name1 lastIndexOf 'c'
val idxM = name1 lastIndexOf 'm'
@@ -155,7 +158,7 @@ trait NameManglers {
def getterToLocal(name: TermName): TermName = name append LOCAL_SUFFIX_STRING
def getterToSetter(name: TermName): TermName = name append SETTER_SUFFIX
def localToGetter(name: TermName): TermName = name dropRight LOCAL_SUFFIX_STRING.length
-
+
def dropLocalSuffix(name: Name): Name = if (name endsWith ' ') name dropRight 1 else name
def setterToGetter(name: TermName): TermName = {
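The specialization helpers touched above encode everything in the name itself: a specialized member ends in "$sp", preceded by 'm' plus the method's own specialized type-parameter codes and 'c' plus the enclosing class's, so a name like foo$mIcD$sp denotes specialization on Int (the method's) and Double (the class's), per the doc comment on splitSpecializedName. A hedged re-implementation on plain Strings; splitSpecialized and its tuple order are this sketch's conventions, not nme's API:

    object SpecializedNames {
      // "foo$mIcD$sp" -> ("foo", "I", "D"): base name, method type
      // params, class type params (conventions of this sketch only).
      def splitSpecialized(name: String): (String, String, String) =
        if (name endsWith "$sp") {
          val base = name dropRight "$sp".length   // e.g. "foo$mIcD"
          val idxC = base lastIndexOf 'c'
          val idxM = base lastIndexOf 'm'
          (base take (idxM - 1),
           base.substring(idxM + 1, idxC),
           base.substring(idxC + 1))
        } else (name, "", "")
    }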
diff --git a/src/compiler/scala/reflect/internal/Names.scala b/src/compiler/scala/reflect/internal/Names.scala
index 907b564d4c..5f38374f20 100644
--- a/src/compiler/scala/reflect/internal/Names.scala
+++ b/src/compiler/scala/reflect/internal/Names.scala
@@ -73,7 +73,7 @@ trait Names extends api.Names {
/** Create a term name from the characters in cs[offset..offset+len-1]. */
def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
newTermName(cs, offset, len, cachedString = null)
-
+
def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
@@ -87,7 +87,7 @@ trait Names extends api.Names {
var n = termHashtable(h)
while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
n = n.next
-
+
if (n ne null) n
else {
// The logic order here is future-proofing against the possibility
@@ -135,7 +135,7 @@ trait Names extends api.Names {
/** The name class.
* TODO - resolve schizophrenia regarding whether to treat Names as Strings
- * or Strings as Names. Give names the key functions the absence of which
+ * or Strings as Names. Give names the key functions the absence of which
* makes people want Strings all the time.
*/
sealed abstract class Name(protected val index: Int, protected val len: Int) extends AbsName with Function1[Int, Char] {
@@ -166,7 +166,7 @@ trait Names extends api.Names {
/** Return a new name of the same variety. */
def newName(str: String): ThisNameType
-
+
/** Return a new name based on string transformation. */
def mapName(f: String => String): ThisNameType = newName(f(toString))
@@ -357,7 +357,7 @@ trait Names extends api.Names {
def dropRight(n: Int) = subName(0, len - n)
def drop(n: Int) = subName(n, len)
-
+
def indexOf(ch: Char) = {
val idx = pos(ch)
if (idx == length) -1 else idx
@@ -382,11 +382,18 @@ trait Names extends api.Names {
}
newTermName(cs, 0, len)
}
-
+
/** TODO - reconcile/fix that encode returns a Name but
* decode returns a String.
*/
+ /** !!! Duplicative but consistently named.
+ */
+ def decoded: String = decode
+ def encoded: String = "" + encode
+ // def decodedName: ThisNameType = newName(decoded)
+ def encodedName: ThisNameType = encode
+
/** Replace operator symbols by corresponding $op_name. */
def encode: ThisNameType = {
val str = toString
@@ -418,7 +425,7 @@ trait Names extends api.Names {
def longString: String = nameKind + " " + decode
def debugString = { val s = decode ; if (isTypeName) s + "!" else s }
}
-
+
/** A name that contains no operator chars nor dollar signs.
* TODO - see if it's any faster to do something along these lines.
*/
@@ -454,7 +461,7 @@ trait Names extends api.Names {
sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
type ThisNameType = TermName
protected[this] def thisName: TermName = this
-
+
var next: TermName = termHashtable(hash)
termHashtable(hash) = this
def isTermName: Boolean = true
@@ -481,7 +488,7 @@ trait Names extends api.Names {
sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
type ThisNameType = TypeName
protected[this] def thisName: TypeName = this
-
+
var next: TypeName = typeHashtable(hash)
typeHashtable(hash) = this
def isTermName: Boolean = false
diff --git a/src/compiler/scala/reflect/internal/Phase.scala b/src/compiler/scala/reflect/internal/Phase.scala
index acd3360c4f..89d643aacf 100644
--- a/src/compiler/scala/reflect/internal/Phase.scala
+++ b/src/compiler/scala/reflect/internal/Phase.scala
@@ -26,6 +26,8 @@ abstract class Phase(val prev: Phase) {
if ((prev ne null) && (prev ne NoPhase)) prev.nx = this
def next: Phase = nx
+ def hasNext = next != this
+ def iterator = Iterator.iterate(this)(_.next) takeWhile (p => p.next != p)
def name: String
def description: String = name
@@ -37,7 +39,7 @@ abstract class Phase(val prev: Phase) {
def refChecked: Boolean = false
/** This is used only in unsafeTypeParams, and at this writing is
- * overridden to false in namer, typer, and erasure. (And NoPhase.)
+ * overridden to false in parser, namer, typer, and erasure. (And NoPhase.)
*/
def keepsTypeParams = true
def run(): Unit
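The new Phase.hasNext/iterator above lean on the linked-list convention that the terminal phase's next is itself; note that the takeWhile condition stops the iterator before yielding that terminal phase. A runnable toy model, with Ph and the phase names as stand-ins:

    object PhaseIterDemo extends App {
      class Ph(val name: String, next0: Ph) {
        val next: Ph = if (next0 eq null) this else next0
        def hasNext = next ne this
        def iterator: Iterator[Ph] =
          Iterator.iterate(this)(_.next) takeWhile (p => p.next ne p)
      }
      val erasure = new Ph("erasure", null)  // terminal: next is itself
      val typer   = new Ph("typer", erasure)
      val parser  = new Ph("parser", typer)

      // Prints List(parser, typer): the terminal phase is excluded
      // by the takeWhile condition above.
      println(parser.iterator.map(_.name).toList)
    }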
diff --git a/src/compiler/scala/reflect/internal/Scopes.scala b/src/compiler/scala/reflect/internal/Scopes.scala
index 54d3de09cd..ef48d6102f 100644
--- a/src/compiler/scala/reflect/internal/Scopes.scala
+++ b/src/compiler/scala/reflect/internal/Scopes.scala
@@ -38,11 +38,11 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** Note: constructor is protected to force everyone to use the factory methods newScope or newNestedScope instead.
- * This is necessary because when run from reflection every scope needs to have a
- * SynchronizedScope as mixin.
+ * This is necessary because when run from reflection every scope needs to have a
+ * SynchronizedScope as mixin.
*/
class Scope protected[Scopes] (initElems: ScopeEntry = null) extends Iterable[Symbol] {
-
+
protected[Scopes] def this(base: Scope) = {
this(base.elems)
nestinglevel = base.nestinglevel + 1
@@ -120,7 +120,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
* @param sym ...
*/
def enterUnique(sym: Symbol) {
- assert(lookup(sym.name) == NoSymbol)
+ assert(lookup(sym.name) == NoSymbol, (sym.fullLocationString, lookup(sym.name).fullLocationString))
enter(sym)
}
@@ -319,7 +319,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** Create a new scope */
def newScope: Scope = new Scope()
-
+
/** Create a new scope nested in another one with which it shares its elements */
def newNestedScope(outer: Scope): Scope = new Scope(outer)
diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala
index c3a7dc23f3..84007425ed 100644
--- a/src/compiler/scala/reflect/internal/StdNames.scala
+++ b/src/compiler/scala/reflect/internal/StdNames.scala
@@ -94,11 +94,13 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val EMPTY: NameType = ""
val ANON_FUN_NAME: NameType = "$anonfun"
+ val ANON_CLASS_NAME: NameType = "$anon"
val EMPTY_PACKAGE_NAME: NameType = "<empty>"
val IMPORT: NameType = "<import>"
val MODULE_VAR_SUFFIX: NameType = "$module"
val ROOT: NameType = "<root>"
val PACKAGE: NameType = "package"
+ val SPECIALIZED_SUFFIX: NameType = "$sp"
// value types (and AnyRef) are all used as terms as well
// as (at least) arguments to the @specialize annotation.
@@ -152,6 +154,9 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val ClassfileAnnotation: NameType = "ClassfileAnnotation"
final val Enum: NameType = "Enum"
+ final val Tree: NameType = "Tree"
+ final val TypeTree: NameType = "TypeTree"
+
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
final val BooleanBeanPropertyAnnot: NameType = "BooleanBeanProperty"
@@ -240,7 +245,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val x_7 : NameType = "x$7"
val x_8 : NameType = "x$8"
val x_9 : NameType = "x$9"
-
+
@switch def syntheticParamName(i: Int): TermName = i match {
case 0 => nme.x_0
case 1 => nme.x_1
@@ -254,7 +259,9 @@ trait StdNames extends NameManglers { self: SymbolTable =>
case 9 => nme.x_9
case _ => newTermName("x$" + i)
}
-
+
+ val ??? = encode("???")
+
val wrapRefArray: NameType = "wrapRefArray"
val wrapByteArray: NameType = "wrapByteArray"
val wrapShortArray: NameType = "wrapShortArray"
@@ -270,8 +277,9 @@ trait StdNames extends NameManglers { self: SymbolTable =>
// Compiler utilized names
// val productElementName: NameType = "productElementName"
val Ident: NameType = "Ident"
- val This: NameType = "This"
val StringContext: NameType = "StringContext"
+ val This: NameType = "This"
+ val Tree : NameType = "Tree"
val TYPE_ : NameType = "TYPE"
val TypeTree: NameType = "TypeTree"
val UNIT : NameType = "UNIT"
@@ -302,8 +310,6 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val classOf: NameType = "classOf"
val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
val conforms: NameType = "conforms"
- val context : NameType = "_context"
- val contextImplicit : NameType = "$context"
val copy: NameType = "copy"
val delayedInit: NameType = "delayedInit"
val delayedInitArg: NameType = "delayedInit$body"
@@ -327,6 +333,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val freeValue : NameType = "freeValue"
val genericArrayOps: NameType = "genericArrayOps"
val get: NameType = "get"
+ val getOrElse: NameType = "getOrElse"
val hasNext: NameType = "hasNext"
val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
val hash_ : NameType = "hash"
@@ -339,11 +346,14 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val isInstanceOf_ : NameType = "isInstanceOf"
val isInstanceOf_Ob : NameType = "$isInstanceOf"
val java: NameType = "java"
+ val key: NameType = "key"
val lang: NameType = "lang"
val length: NameType = "length"
val lengthCompare: NameType = "lengthCompare"
val lift_ : NameType = "lift"
val macro_ : NameType = "macro"
+ val macroThis : NameType = "_this"
+ val macroContext : NameType = "_context"
val main: NameType = "main"
val map: NameType = "map"
val mirror : NameType = "mirror"
@@ -368,7 +378,9 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val self: NameType = "self"
val setAccessible: NameType = "setAccessible"
val setAnnotations: NameType = "setAnnotations"
- val setTypeSig: NameType = "setTypeSig"
+ val setSymbol: NameType = "setSymbol"
+ val setType: NameType = "setType"
+ val setTypeSignature: NameType = "setTypeSignature"
val synchronized_ : NameType = "synchronized"
val tail: NameType = "tail"
val thisModuleType: NameType = "thisModuleType"
@@ -426,12 +438,11 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val toInteger: NameType = "toInteger"
}
- object tpnme extends TypeNames /*with LibraryTypeNames*/ with TypeNameMangling {
+ object tpnme extends AbsTypeNames with TypeNames /*with LibraryTypeNames*/ with TypeNameMangling {
type NameType = TypeName
protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
val REFINE_CLASS_NAME: NameType = "<refinement>"
- val ANON_CLASS_NAME: NameType = "$anon"
}
/** For fully qualified type names.
@@ -463,7 +474,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val javanme = nme.javaKeywords
- object nme extends TermNames /*with LibraryTermNames*/ with TermNameMangling {
+ object nme extends AbsTermNames with TermNames /*with LibraryTermNames*/ with TermNameMangling {
type NameType = TermName
protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
@@ -606,7 +617,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val testLessOrEqualThan: NameType = "testLessOrEqualThan"
val testLessThan: NameType = "testLessThan"
val testNotEqual: NameType = "testNotEqual"
-
+
val isBoxedNumberOrBoolean: NameType = "isBoxedNumberOrBoolean"
val isBoxedNumber: NameType = "isBoxedNumber"
@@ -658,7 +669,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
case `toDouble` => toDouble
case _ => NO_NAME
}
-
+
val reflPolyCacheName: NameType = "reflPoly$Cache"
val reflClassCacheName: NameType = "reflClass$Cache"
val reflParamsCacheName: NameType = "reflParams$Cache"
@@ -710,7 +721,6 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val BoxedCharacter : TypeName
val BoxedNumber : TypeName
val Class : TypeName
- val Code : TypeName
val Delegate : TypeName
val IOOBException : TypeName // IndexOutOfBoundsException
val InvTargetException : TypeName // InvocationTargetException
@@ -845,7 +855,6 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val BoxedCharacter: TypeName = "System.IConvertible"
final val BoxedNumber: TypeName = "System.IConvertible"
final val Class: TypeName = "System.Type"
- final val Code: TypeName = tpnme.NO_NAME
final val Delegate: TypeName = "System.MulticastDelegate"
final val IOOBException: TypeName = "System.IndexOutOfRangeException"
final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException"
@@ -879,7 +888,6 @@ trait StdNames extends NameManglers { self: SymbolTable =>
private class J2SENames extends JavaNames {
final val BeanProperty: TypeName = "scala.beans.BeanProperty"
final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty"
- final val Code: TypeName = "scala.reflect.Code"
final val JavaSerializable: TypeName = "java.io.Serializable"
}
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index 2e799f914a..5ae8f5dbf4 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -8,6 +8,7 @@ package internal
import scala.collection.{ mutable, immutable }
import util._
+import scala.tools.nsc.util.WeakHashSet
abstract class SymbolTable extends api.Universe
with Collections
@@ -15,6 +16,7 @@ abstract class SymbolTable extends api.Universe
with Symbols
with Types
with Kinds
+ with ExistentialsAndSkolems
with Scopes
with Definitions
with Constants
@@ -41,7 +43,7 @@ abstract class SymbolTable extends api.Universe
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg)
def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
-
+
/** Overridden when we know more about what was happening during a failure. */
def supplementErrorMessage(msg: String): String = msg
@@ -77,16 +79,29 @@ abstract class SymbolTable extends api.Universe
type RunId = Int
final val NoRunId = 0
+ // sigh, this has to be public or atPhase doesn't inline.
+ var phStack: List[Phase] = Nil
private var ph: Phase = NoPhase
private var per = NoPeriod
+ final def atPhaseStack: List[Phase] = phStack
final def phase: Phase = ph
final def phase_=(p: Phase) {
//System.out.println("setting phase to " + p)
- assert((p ne null) && p != NoPhase)
+ assert((p ne null) && p != NoPhase, p)
ph = p
- per = (currentRunId << 8) + p.id
+ per = period(currentRunId, p.id)
+ }
+ final def pushPhase(ph: Phase): Phase = {
+ val current = phase
+ phase = ph
+ phStack ::= ph
+ current
+ }
+ final def popPhase(ph: Phase) {
+ phStack = phStack.tail
+ phase = ph
}
/** The current compiler run identifier. */
@@ -111,18 +126,23 @@ abstract class SymbolTable extends api.Universe
final def phaseOf(period: Period): Phase = phaseWithId(phaseId(period))
final def period(rid: RunId, pid: Phase#Id): Period =
- (currentRunId << 8) + pid
+ (rid << 8) + pid
/** Perform given operation at given phase. */
@inline final def atPhase[T](ph: Phase)(op: => T): T = {
- val current = phase
- phase = ph
+ val saved = pushPhase(ph)
try op
- finally phase = current
+ finally popPhase(saved)
}
+
- @inline final def afterPhase[T](ph: Phase)(op: => T): T =
- atPhase(ph.next)(op)
+ /** Since what it means to be "at" a phase is inherently ambiguous,
+ * a couple of unambiguously named methods.
+ */
+ @inline final def beforePhase[T](ph: Phase)(op: => T): T = atPhase(ph)(op)
+ @inline final def afterPhase[T](ph: Phase)(op: => T): T = atPhase(ph.next)(op)
+ @inline final def afterCurrentPhase[T](op: => T): T = atPhase(phase.next)(op)
+ @inline final def beforePrevPhase[T](op: => T): T = atPhase(phase.prev)(op)
@inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T =
if (target != NoPhase && phase.id > target.id) atPhase(target)(op) else op
@@ -257,9 +277,10 @@ abstract class SymbolTable extends api.Universe
}
}
- def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]())
- def newMap[K, V]() = recordCache(mutable.HashMap[K, V]())
- def newSet[K]() = recordCache(mutable.HashSet[K]())
+ def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]())
+ def newMap[K, V]() = recordCache(mutable.HashMap[K, V]())
+ def newSet[K]() = recordCache(mutable.HashSet[K]())
+ def newWeakSet[K <: AnyRef]() = recordCache(new WeakHashSet[K]())
}
/** Break into repl debugger if assertion is true. */
@@ -276,7 +297,7 @@ abstract class SymbolTable extends api.Universe
/** The phase which has given index as identifier. */
val phaseWithId: Array[Phase]
-
+
/** Is this symbol table part of reflexive mirror? In this case
* operations need to be made thread safe.
*/
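Two things happen in the SymbolTable hunks above: atPhase is re-expressed through an explicit pushPhase/popPhase pair that also maintains phStack, and period() is fixed to pack its rid parameter (rather than currentRunId) into the high bits, with the phase id in the low 8 bits. A self-contained sketch of both over plain Ints; the object and field names are illustrative:

    object PhaseBookkeeping {
      type RunId = Int; type Period = Int; type PhaseId = Int

      def period(rid: RunId, pid: PhaseId): Period = (rid << 8) + pid
      def phaseId(p: Period): PhaseId = p & 0xFF
      def runId(p: Period): RunId = p >>> 8

      private var current: PhaseId = 0
      private var stack: List[PhaseId] = Nil
      def phaseStack: List[PhaseId] = stack

      def pushPhase(ph: PhaseId): PhaseId = {
        val saved = current
        current = ph
        stack ::= ph
        saved
      }
      def popPhase(saved: PhaseId): Unit = {
        stack = stack.tail
        current = saved
      }
      // atPhase in terms of push/pop, as in the patch.
      def atPhase[T](ph: PhaseId)(op: => T): T = {
        val saved = pushPhase(ph)
        try op finally popPhase(saved)
      }
    }

With this packing, period(3, 7) == 775, runId(775) == 3 and phaseId(775) == 7.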
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index 415b32958d..853046e81a 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -17,7 +17,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
protected var ids = 0
-
+
val emptySymbolArray = new Array[Symbol](0)
def symbolCount = ids // statistics
@@ -38,14 +38,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
nextexid += 1
newTypeName("_" + nextexid + suffix)
}
-
+
// Set the fields which point companions at one another. Returns the module.
def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = {
moduleClass.sourceModule = m
m setModuleClass moduleClass
m
}
-
+
/** Create a new free variable. Its owner is NoSymbol.
*/
def newFreeVar(name: TermName, tpe: Type, value: Any, newFlags: Long = 0L): FreeVar =
@@ -61,13 +61,18 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case n: TermName => newTermSymbol(n, pos, newFlags)
case n: TypeName => newTypeSymbol(n, pos, newFlags)
}
- def typeSig: Type = info
- def typeSigIn(site: Type): Type = site.memberInfo(this)
+ def enclosingClass: Symbol = enclClass
+ def enclosingMethod: Symbol = enclMethod
+ def thisPrefix: Type = thisType
+ def selfType: Type = typeOfThis
+ def typeSignature: Type = info
+ def typeSignatureIn(site: Type): Type = site memberInfo this
+
def asType: Type = tpe
def asTypeIn(site: Type): Type = site.memberType(this)
def asTypeConstructor: Type = typeConstructor
def setInternalFlags(flag: Long): this.type = { setFlag(flag); this }
- def setTypeSig(tpe: Type): this.type = { setInfo(tpe); this }
+ def setTypeSignature(tpe: Type): this.type = { setInfo(tpe); this }
def setAnnotations(annots: AnnotationInfo*): this.type = { setAnnotations(annots.toList); this }
}
@@ -84,31 +89,36 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
private[this] var _rawname = initName
private[this] var _rawflags = 0L
-
+
def rawowner = _rawowner
def rawname = _rawname
def rawflags = _rawflags
-
+
protected def rawflags_=(x: FlagsType) { _rawflags = x }
-
+
private var rawpos = initPos
-
+
val id = nextId() // identity displayed when -uniqid
private[this] var _validTo: Period = NoPeriod
-
+
def validTo = _validTo
def validTo_=(x: Period) { _validTo = x}
def pos = rawpos
def setPos(pos: Position): this.type = { this.rawpos = pos; this }
- override def hasModifier(mod: Modifier.Value) =
+ /** !!! The logic after "hasFlag" is far too opaque to be unexplained.
+ * I'm guessing it's attempting to compensate for flag overloading,
+ * and embedding such logic in an undocumented island like this is a
+ * notarized guarantee of future breakage.
+ */
+ override def hasModifier(mod: Modifier) =
hasFlag(flagOfModifier(mod)) &&
(!(mod == Modifier.bynameParameter) || isTerm) &&
(!(mod == Modifier.covariant) || isType)
- override def allModifiers: Set[Modifier.Value] =
+ override def modifiers: Set[Modifier] =
Modifier.values filter hasModifier
// ------ creators -------------------------------------------------------------------
@@ -169,10 +179,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
new TermSymbol(this, pos, name) initFlags newFlags
-
+
def newAbstractTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AbstractTypeSymbol =
new AbstractTypeSymbol(this, pos, name) initFlags newFlags
-
+
def newAliasTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AliasTypeSymbol =
new AliasTypeSymbol(this, pos, name) initFlags newFlags
@@ -184,10 +194,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
new ClassSymbol(this, pos, name) initFlags newFlags
-
+
def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
new ModuleClassSymbol(this, pos, name) initFlags newFlags
-
+
/** Derive whether it is an abstract type from the flags; after creation
* the DEFERRED flag will be ignored.
*/
@@ -196,7 +206,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
newAliasTypeSymbol(name, pos, newFlags)
else
newAbstractTypeSymbol(name, pos, newFlags)
-
+
def newTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position = NoPosition, newFlags: Long = 0L): TypeSkolem =
if ((newFlags & DEFERRED) == 0L)
new TypeSkolem(this, pos, name, origin) initFlags newFlags
@@ -233,7 +243,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def newAliasType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
newAliasTypeSymbol(name, pos, newFlags)
-
+
/** Symbol of an abstract type type T >: ... <: ...
*/
final def newAbstractType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
@@ -251,7 +261,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def freshName() = { cnt += 1; nme.syntheticParamName(cnt) }
mmap(argtypess)(tp => newValueParameter(freshName(), focusPos(owner.pos), SYNTHETIC) setInfo tp)
}
-
+
def newSyntheticTypeParam(): Symbol = newSyntheticTypeParam("T0", 0L)
def newSyntheticTypeParam(name: String, newFlags: Long): Symbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
def newSyntheticTypeParams(num: Int): List[Symbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
@@ -259,9 +269,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Create a new existential type skolem with this symbol its owner,
* based on the given symbol and origin.
*/
- def newExistentialSkolem(basis: Symbol, origin: AnyRef): TypeSkolem = {
- val skolem = newTypeSkolemSymbol(basis.name.toTypeName, origin, basis.pos, (basis.flags | EXISTENTIAL) & ~PARAM)
- skolem setInfo (basis.info cloneInfo skolem)
+ def newExistentialSkolem(basis: Symbol, origin: AnyRef, name: TypeName = null, info: Type = null): TypeSkolem = {
+ val skolem = newTypeSkolemSymbol(if (name eq null) basis.name.toTypeName else name, origin, basis.pos, (basis.flags | EXISTENTIAL) & ~PARAM)
+ skolem setInfo (if (info eq null) basis.info cloneInfo skolem else info)
}
final def newExistential(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
@@ -292,7 +302,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L) =
newClassSymbol(name, pos, newFlags)
-
+
/** A new class with its info set to a ClassInfoType with given scope and parents. */
def newClassWithInfo(name: TypeName, parents: List[Type], scope: Scope, pos: Position = NoPosition, newFlags: Long = 0L) = {
val clazz = newClass(name, pos, newFlags)
@@ -344,9 +354,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newAliasType(pos: Position, name: TypeName): Symbol = newAliasType(name, pos)
@deprecated("Use the other signature", "2.10.0")
def newAbstractType(pos: Position, name: TypeName): Symbol = newAbstractType(name, pos)
- @deprecated("Use the other signature", "2.10.0")
+ @deprecated("Use the other signature", "2.10.0")
def newExistential(pos: Position, name: TypeName): Symbol = newExistential(name, pos)
- @deprecated("Use the other signature", "2.10.0")
+ @deprecated("Use the other signature", "2.10.0")
def newMethod(pos: Position, name: TermName): MethodSymbol = newMethod(name, pos)
// ----- locking and unlocking ------------------------------------------------------
@@ -830,7 +840,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def addModuleSuffix(n: Name): Name =
if (needsModuleSuffix) n append nme.MODULE_SUFFIX_STRING else n
-
+
def moduleSuffix: String = (
if (needsModuleSuffix) nme.MODULE_SUFFIX_STRING
else ""
@@ -838,15 +848,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Whether this symbol needs nme.MODULE_SUFFIX_STRING (aka $) appended on the java platform.
*/
def needsModuleSuffix = (
- hasModuleFlag
+ hasModuleFlag
&& !isMethod
&& !isImplClass
&& !isJavaDefined
)
/** These should be moved somewhere like JavaPlatform.
*/
- def javaSimpleName: String = addModuleSuffix(nme.dropLocalSuffix(simpleName)).toString
- def javaBinaryName: String = addModuleSuffix(fullNameInternal('/')).toString
+ def javaSimpleName: Name = addModuleSuffix(nme.dropLocalSuffix(simpleName))
+ def javaBinaryName: Name = addModuleSuffix(fullNameInternal('/'))
def javaClassName: String = addModuleSuffix(fullNameInternal('.')).toString
/** The encoded full path name of this symbol, where outer names and inner names
@@ -865,7 +875,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (owner.isEffectiveRoot) name
else effectiveOwner.enclClass.fullNameAsName(separator) append separator append name
)
-
+
def fullNameAsName(separator: Char): Name = nme.dropLocalSuffix(fullNameInternal(separator))
/** The encoded full path name of this symbol, where outer names and inner names
@@ -1015,9 +1025,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Modifies this symbol's info in place. */
def modifyInfo(f: Type => Type): this.type = setInfo(f(info))
/** Substitute second list of symbols for first in current info. */
- def substInfo(syms0: List[Symbol], syms1: List[Symbol]) = modifyInfo(_.substSym(syms0, syms1))
- def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info atOwner this)
-
+ def substInfo(syms0: List[Symbol], syms1: List[Symbol]): this.type =
+ if (syms0.isEmpty) this
+ else modifyInfo(_.substSym(syms0, syms1))
+
+ def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info atOwner this)
+
/** Set the info and enter this symbol into the owner's scope. */
def setInfoAndEnter(info: Type): this.type = {
setInfo(info)
@@ -1155,7 +1168,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
abort("typeConstructor inapplicable for " + this)
/** The logic approximately boils down to finding the most recent phase
- * which immediately follows any of namer, typer, or erasure.
+ * which immediately follows any of parser, namer, typer, or erasure.
+ * In effect that means this will return one of:
+ *
+ * - packageobjects (follows namer)
+ * - superaccessors (follows typer)
+ * - lazyvals (follows erasure)
+ * - null
*/
private def unsafeTypeParamPhase = {
var ph = phase
@@ -1273,14 +1292,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** After the typer phase (before, look at the definition's Modifiers), contains
* the annotations attached to a member definition (class, method, type, field).
*/
- def annotations: List[AnnotationInfo] = _annotations
+ def annotations: List[AnnotationInfo] = {
+ // Necessary for reflection, see SI-5423
+ if (inReflexiveMirror)
+ initialize
- /** This getter is necessary for reflection, see https://issues.scala-lang.org/browse/SI-5423
- * We could auto-inject completion into `annotations' and `setAnnotations', but I'm not sure about that
- * @odersky writes: I fear we can't do the forcing for all compiler symbols as that could introduce cycles
- */
- def getAnnotations: List[AnnotationInfo] = {
- initialize
_annotations
}
@@ -1329,7 +1345,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def isNestedIn(that: Symbol): Boolean =
owner == that || owner != NoSymbol && (owner isNestedIn that)
-
+
/** Is this class symbol a subclass of that symbol,
* and is this class symbol also different from Null or Nothing? */
def isNonBottomSubClass(that: Symbol): Boolean = false
@@ -1375,15 +1391,25 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
cloneSymbol(owner)
/** A clone of this symbol, but with given owner. */
- final def cloneSymbol(owner: Symbol): Symbol = cloneSymbol(owner, this.rawflags)
- final def cloneSymbol(owner: Symbol, newFlags: Long): Symbol = {
- val newSym = cloneSymbolImpl(owner, newFlags)
- ( newSym
+ final def cloneSymbol(newOwner: Symbol): Symbol =
+ cloneSymbol(newOwner, this.rawflags)
+ final def cloneSymbol(newOwner: Symbol, newFlags: Long): Symbol =
+ cloneSymbol(newOwner, newFlags, nme.NO_NAME)
+ final def cloneSymbol(newOwner: Symbol, newFlags: Long, newName: Name): Symbol = {
+ val clone = cloneSymbolImpl(newOwner, newFlags)
+ ( clone
setPrivateWithin privateWithin
- setInfo (info cloneInfo newSym)
+ setInfo (this.info cloneInfo clone)
setAnnotations this.annotations
)
+ if (clone.thisSym != clone)
+ clone.typeOfThis = (clone.typeOfThis cloneInfo clone)
+ if (newName != nme.NO_NAME)
+ clone.name = newName
+
+ clone
}
+
/** Internal method to clone a symbol's implementation with the given flags and no info. */
def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol
def cloneSymbolImpl(owner: Symbol): Symbol = cloneSymbolImpl(owner, 0L)
@@ -1573,11 +1599,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (isMethod || isClass) this
else owner.logicallyEnclosingMember
+ /** Kept for source compatibility with 2.9. Scala IDE for Eclipse relies on this. */
+ @deprecated("Use enclosingTopLevelClass")
+ def toplevelClass: Symbol = enclosingTopLevelClass
+
/** The top-level class containing this symbol. */
- def toplevelClass: Symbol =
+ def enclosingTopLevelClass: Symbol =
if (owner.isPackageClass) {
if (isClass) this else moduleClass
- } else owner.toplevelClass
+ } else owner.enclosingTopLevelClass
/** Is this symbol defined in the same scope and compilation unit as `that` symbol? */
def isCoDefinedWith(that: Symbol) = (
@@ -1695,6 +1725,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* (which is always the interface, by convention)
* - before erasure, it looks up the interface name in the scope of the owner of the class.
* This only works for implementation classes owned by other classes or traits.
+ * !!! Why?
*/
final def toInterface: Symbol =
if (isImplClass) {
@@ -1847,7 +1878,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Remove any access boundary and clear flags PROTECTED | PRIVATE.
*/
def makePublic = this setPrivateWithin NoSymbol resetFlag AccessFlags
-
+
/** The first parameter to the first argument list of this method,
* or NoSymbol if inapplicable.
*/
@@ -1881,7 +1912,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def sourceFile: AbstractFileType =
if (isModule) moduleClass.sourceFile
- else toplevelClass.sourceFile
+ else enclosingTopLevelClass.sourceFile
def sourceFile_=(f: AbstractFileType) {
abort("sourceFile_= inapplicable for " + this)
@@ -2071,6 +2102,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def infosString = infos.toString()
+ def debugLocationString = fullLocationString + " " + debugFlagString
+ def debugFlagString = hasFlagsToString(-1L)
def hasFlagsToString(mask: Long): String = flagsToString(
flags & mask,
if (hasAccessBoundary) privateWithin.toString else ""
@@ -2129,7 +2162,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def referenced: Symbol = _referenced
def referenced_=(x: Symbol) { _referenced = x }
-
+
def existentialBound = singletonBounds(this.tpe)
def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol =
@@ -2169,7 +2202,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def setLazyAccessor(sym: Symbol): TermSymbol = {
- assert(isLazy && (referenced == NoSymbol || referenced == sym), (this, hasFlagsToString(-1L), referenced, sym))
+ assert(isLazy && (referenced == NoSymbol || referenced == sym), (this, debugFlagString, referenced, sym))
referenced = sym
this
}
@@ -2223,7 +2256,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (!isMethod && needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname)
-
+
flatname
}
else rawname.toTermName
@@ -2259,7 +2292,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
res
}
}
-
+
class AliasTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) {
// Temporary programmatic help tracking down who might do such a thing
@@ -2274,13 +2307,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def cloneSymbolImpl(owner: Symbol, newFlags: Long): AliasTypeSymbol =
owner.newAliasTypeSymbol(name, pos, newFlags)
}
-
+
class AbstractTypeSymbol(initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) with AbstractTypeMixin {
override def cloneSymbolImpl(owner: Symbol, newFlags: Long): AbstractTypeSymbol =
owner.newAbstractTypeSymbol(name, pos, newFlags)
}
-
+
/** Might be mixed into TypeSymbol or TypeSkolem.
*/
trait AbstractTypeMixin extends TypeSymbol {
@@ -2310,15 +2343,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Overridden in subclasses for which it makes sense.
*/
- def existentialBound: Type = abort("unexpected type: "+this.getClass+ " "+this.fullLocationString+ " " + hasFlagsToString(-1L))
+ def existentialBound: Type = abort("unexpected type: "+this.getClass+ " "+debugLocationString)
- override def name: TypeName = super.name.asInstanceOf[TypeName]
+ override def name: TypeName = super.name.toTypeName
final override def isType = true
override def isNonClassType = true
override def isAbstractType = {
if (settings.debug.value) {
if (isDeferred) {
- println("TypeSymbol claims to be abstract type: " + this.getClass + " " + hasFlagsToString(-1L) + " at ")
+ println("TypeSymbol claims to be abstract type: " + this.getClass + " " + debugFlagString + " at ")
(new Throwable).printStackTrace
}
}
@@ -2506,19 +2539,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
thisTypeCache
}
-
+
override def owner: Symbol =
if (needsFlatClasses) rawowner.owner else rawowner
override def name: TypeName = (
if (needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
-
+
flatname
}
else rawname.toTypeName
)
-
+
/** A symbol carrying the self type of the class as its type */
override def thisSym: Symbol = thissym
@@ -2590,7 +2623,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
class FreeVar(name0: TermName, val value: Any) extends TermSymbol(NoSymbol, NoPosition, name0) {
- override def hashCode = value.hashCode
+ override def hashCode = if (value == null) 0 else value.hashCode
override def equals(other: Any): Boolean = other match {
case that: FreeVar => this.value.asInstanceOf[AnyRef] eq that.value.asInstanceOf[AnyRef]
case _ => false
@@ -2613,7 +2646,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def defString: String = toString
override def locationString: String = ""
override def enclClass: Symbol = this
- override def toplevelClass: Symbol = this
+ override def enclosingTopLevelClass: Symbol = this
override def enclMethod: Symbol = this
override def sourceFile: AbstractFileType = null
override def ownerChain: List[Symbol] = List()
@@ -2689,6 +2722,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def cloneSymbolsAndModify(syms: List[Symbol], infoFn: Type => Type): List[Symbol] =
cloneSymbols(syms) map (_ modifyInfo infoFn)
+ def cloneSymbolsAtOwnerAndModify(syms: List[Symbol], owner: Symbol, infoFn: Type => Type): List[Symbol] =
+ cloneSymbolsAtOwner(syms, owner) map (_ modifyInfo infoFn)
/** Functions which perform the standard clone/substituting on the given symbols and type,
* then call the creator function with the new symbols and type as arguments.
@@ -2701,7 +2736,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
val syms1 = cloneSymbolsAtOwner(syms, owner)
creator(syms1, tpe.substSym(syms, syms1))
}
-
+
/** A deep map on a symbol's paramss.
*/
def mapParamss[T](sym: Symbol)(f: Symbol => T): List[List[T]] = mmap(sym.info.paramss)(f)
@@ -2725,5 +2760,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
assert(validFrom != NoPeriod)
override def toString() =
"TypeHistory(" + phaseOf(validFrom)+":"+runId(validFrom) + "," + info + "," + prev + ")"
+
+ def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList )
}
}
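The cloneSymbol rework above threads the optional new name through an overload chain with a sentinel default (nme.NO_NAME) rather than an Option, keeping the common two- and three-argument paths allocation-free. The idiom in miniature; Sym and null-as-sentinel are this sketch's stand-ins:

    class Sym(var owner: Sym, var flags: Long, var name: String) {
      final def cloneSym(newOwner: Sym): Sym =
        cloneSym(newOwner, this.flags)
      final def cloneSym(newOwner: Sym, newFlags: Long): Sym =
        cloneSym(newOwner, newFlags, null)   // null plays nme.NO_NAME's role
      final def cloneSym(newOwner: Sym, newFlags: Long, newName: String): Sym = {
        val clone = new Sym(newOwner, newFlags, name)
        if (newName != null) clone.name = newName
        clone
      }
    }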
diff --git a/src/compiler/scala/reflect/internal/TreeGen.scala b/src/compiler/scala/reflect/internal/TreeGen.scala
index def1350187..89585724f1 100644
--- a/src/compiler/scala/reflect/internal/TreeGen.scala
+++ b/src/compiler/scala/reflect/internal/TreeGen.scala
@@ -249,20 +249,22 @@ abstract class TreeGen {
* var x: T = _
* which is appropriate to the given Type.
*/
- def mkZero(tp: Type): Tree = {
- val tree = tp.typeSymbol match {
- case UnitClass => Literal(Constant())
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => Literal(Constant(null))
- }
- tree setType tp
+ def mkZero(tp: Type): Tree = tp.typeSymbol match {
+ case NothingClass => mkMethodCall(Predef_???, Nil) setType NothingClass.tpe
+ case _ => Literal(mkConstantZero(tp)) setType tp
+ }
+
+ def mkConstantZero(tp: Type): Constant = tp.typeSymbol match {
+ case UnitClass => Constant(())
+ case BooleanClass => Constant(false)
+ case FloatClass => Constant(0.0f)
+ case DoubleClass => Constant(0.0d)
+ case ByteClass => Constant(0.toByte)
+ case ShortClass => Constant(0.toShort)
+ case IntClass => Constant(0)
+ case LongClass => Constant(0L)
+ case CharClass => Constant(0.toChar)
+ case _ => Constant(null)
}
def mkZeroContravariantAfterTyper(tp: Type): Tree = {
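mkZero above now separates "which constant is the zero of this type" (mkConstantZero) from tree construction, and routes Nothing, which has no values at all, to a throwing Predef.??? call instead of a Constant. The zero table itself, transcribed onto plain Scala values keyed by an illustrative type-name string:

    object Zeroes {
      def zeroOf(tpe: String): Any = tpe match {
        case "Unit"    => ()
        case "Boolean" => false
        case "Float"   => 0.0f
        case "Double"  => 0.0d
        case "Byte"    => 0.toByte
        case "Short"   => 0.toShort
        case "Int"     => 0
        case "Long"    => 0L
        case "Char"    => 0.toChar
        case _         => null   // all reference types
      }
    }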
diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala
index ecd31c784d..769d7a9ed1 100644
--- a/src/compiler/scala/reflect/internal/TreeInfo.scala
+++ b/src/compiler/scala/reflect/internal/TreeInfo.scala
@@ -146,7 +146,7 @@ abstract class TreeInfo {
true
}
-
+
/**
* Selects the correct parameter list when there are nested applications.
* Given Apply(fn, args), args might correspond to any of fn.symbol's parameter
@@ -175,7 +175,7 @@ abstract class TreeInfo {
}
def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match {
case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f)
- case _ =>
+ case _ =>
}
/** Is symbol potentially a getter of a variable?
diff --git a/src/compiler/scala/reflect/internal/TreePrinters.scala b/src/compiler/scala/reflect/internal/TreePrinters.scala
index e7ba0c793d..5845eda5ca 100644
--- a/src/compiler/scala/reflect/internal/TreePrinters.scala
+++ b/src/compiler/scala/reflect/internal/TreePrinters.scala
@@ -24,21 +24,30 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
}
def quotedName(name: Name): String = quotedName(name, false)
+ private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
+ val sym = tree.symbol
+ if (sym != null && sym != NoSymbol) {
+ val prefix = if (sym.isMixinConstructor) "/*%s*/".format(quotedName(sym.owner.name, decoded)) else ""
+ var suffix = ""
+ if (settings.uniqid.value) suffix += ("#" + sym.id)
+ if (settings.Yshowsymkinds.value) suffix += ("#" + sym.abbreviatedKindString)
+ prefix + tree.symbol.decodedName + suffix
+ } else {
+ quotedName(name, decoded)
+ }
+ }
+
+ def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
+ def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
+
/** Turns a path into a String, introducing backquotes
* as necessary.
*/
def backquotedPath(t: Tree): String = {
- def suffix(t: Tree) = {
- var suffix = ""
- if (t.hasSymbol && settings.uniqid.value) suffix += ("#" + t.symbol.id)
- if (t.hasSymbol && settings.Yshowsymkinds.value) suffix += ("#" + t.symbol.abbreviatedKindString)
- suffix
- }
-
t match {
- case Select(qual, name) if name.isTermName => "%s.%s".format(backquotedPath(qual), quotedName(name)) + suffix(t)
- case Select(qual, name) if name.isTypeName => "%s#%s".format(backquotedPath(qual), quotedName(name)) + suffix(t)
- case Ident(name) => quotedName(name) + suffix(t)
+ case Select(qual, name) if name.isTermName => "%s.%s".format(backquotedPath(qual), symName(t, name))
+ case Select(qual, name) if name.isTypeName => "%s#%s".format(backquotedPath(qual), symName(t, name))
+ case Ident(name) => symName(t, name)
case _ => t.toString
}
}
@@ -128,18 +137,6 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
}
private def ifSym(tree: Tree, p: Symbol => Boolean) = symFn(tree, p, false)
- private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
- def nameFn(sym: Symbol) = {
- val prefix = if (sym.isMixinConstructor) "/*%s*/".format(quotedName(sym.owner.name, decoded)) else ""
- val suffix = if (uniqueIds) "#"+sym.id else ""
- prefix + tree.symbol.decodedName + suffix
- }
- symFn(tree, nameFn, quotedName(name, decoded))
- }
-
- def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
- def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
-
def printOpt(prefix: String, tree: Tree) {
if (!tree.isEmpty) { print(prefix, tree) }
}
@@ -307,6 +304,9 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case Assign(lhs, rhs) =>
print(lhs, " = ", rhs)
+ case AssignOrNamedArg(lhs, rhs) =>
+ print(lhs, " = ", rhs)
+
case If(cond, thenp, elsep) =>
print("if (", cond, ")"); indent; println()
print(thenp); undent
@@ -427,7 +427,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case name: Name =>
print(quotedName(name))
case arg =>
- out.print(arg.toString)
+ out.print(if (arg == null) "null" else arg.toString)
}
}
diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala
index 0004311647..e576f09f56 100644
--- a/src/compiler/scala/reflect/internal/Trees.scala
+++ b/src/compiler/scala/reflect/internal/Trees.scala
@@ -84,9 +84,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
def withPosition(flag: Long, position: Position) =
copy() setPositions positions + (flag -> position)
- override def hasModifier(mod: Modifier.Value) =
+ override def hasModifier(mod: Modifier) =
hasFlag(flagOfModifier(mod))
- override def allModifiers: Set[Modifier.Value] =
+ override def modifiers: Set[Modifier] =
Modifier.values filter hasModifier
override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
Modifiers(flags, privateWithin, f(annotations)) setPositions positions
@@ -97,7 +97,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def Modifiers(flags: Long, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List())
def Modifiers(flags: Long): Modifiers = Modifiers(flags, tpnme.EMPTY)
- def Modifiers(mods: Set[Modifier.Value],
+ def Modifiers(mods: Set[Modifier],
privateWithin: Name,
annotations: List[Tree]): Modifiers = {
val flagSet = mods map flagOfModifier
@@ -204,7 +204,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
atPos(sym.pos) {
assert(sym != NoSymbol)
- DefDef(Modifiers(sym.flags),
+ DefDef(mods,
sym.name.toTermName,
sym.typeParams map TypeDef,
vparamss,
@@ -239,31 +239,24 @@ trait Trees extends api.Trees { self: SymbolTable =>
LabelDef(sym.name.toTermName, params map Ident, rhs) setSymbol sym
}
-
/** casedef shorthand */
def CaseDef(pat: Tree, body: Tree): CaseDef = CaseDef(pat, EmptyTree, body)
def Bind(sym: Symbol, body: Tree): Bind =
Bind(sym.name, body) setSymbol sym
+ def Try(body: Tree, cases: (Tree, Tree)*): Try =
+ Try(body, cases.toList map { case (pat, rhs) => CaseDef(pat, EmptyTree, rhs) }, EmptyTree)
- /** Factory method for object creation `new tpt(args_1)...(args_n)`
- * A `New(t, as)` is expanded to: `(new t).<init>(as)`
- */
- def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
- assert(!argss.isEmpty)
- val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
- (superRef /: argss) (Apply)
- }
- /** 0-1 argument list new, based on a symbol.
- */
- def New(sym: Symbol, args: Tree*): Tree =
- if (args.isEmpty) New(TypeTree(sym.tpe))
- else New(TypeTree(sym.tpe), List(args.toList))
+ def Throw(tpe: Type, args: Tree*): Throw =
+ Throw(New(tpe, args: _*))
def Apply(sym: Symbol, args: Tree*): Tree =
Apply(Ident(sym), args.toList)
+ def New(sym: Symbol, args: Tree*): Tree =
+ New(sym.tpe, args: _*)
+
def Super(sym: Symbol, mix: TypeName): Tree = Super(This(sym), mix)
/** Block factory that flattens directly nested blocks.
@@ -295,7 +288,18 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def traverse(t: Tree) {
if (t != EmptyTree && t.pos == NoPosition) {
t.setPos(pos)
- super.traverse(t) // TODO: bug? shouldn't the traverse be outside of the if?
+ super.traverse(t) // TODO: bug? shouldn't the traverse be outside of the if?
+ // @PP: it's pruning whenever it encounters a node with a
+ // position, which I interpret to mean that (in the author's
+ // mind at least) either the children of a positioned node will
+ // already be positioned, or the children of a positioned node
+ // do not merit positioning.
+ //
+ // Whatever the author's rationale, it does seem like a bad idea
+ // to press on through a positioned node to find unpositioned
+ // children beneath it and then to assign whatever happens to
+ // be in `pos` to such nodes. There are supposed to be some
+ // position invariants which I can't imagine surviving that.
}
}
}
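The new Try factory above takes (pattern, body) pairs and expands each into a guardless CaseDef, with an empty finalizer. The same expansion over toy tree types; Tr, CaseDef and Try here are stand-ins, not the compiler's classes:

    object ToyTrees {
      sealed trait Tr
      case object EmptyTree extends Tr
      final case class CaseDef(pat: Tr, guard: Tr, body: Tr) extends Tr
      final case class Try(block: Tr, catches: List[CaseDef], finalizer: Tr) extends Tr

      // Usage: mkTry(body, (pat1, rhs1), (pat2, rhs2))
      def mkTry(body: Tr, cases: (Tr, Tr)*): Try =
        Try(body, cases.toList map { case (pat, rhs) =>
          CaseDef(pat, EmptyTree, rhs)
        }, EmptyTree)
    }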
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index 89664bad9f..94559aeacd 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -112,13 +112,13 @@ trait Types extends api.Types { self: SymbolTable =>
* to undo constraints in the case of isSubType/isSameType failure.
*/
lazy val undoLog = newUndoLog
-
+
protected def newUndoLog = new UndoLog
-
+
class UndoLog {
private type UndoPairs = List[(TypeVar, TypeConstraint)]
private var log: UndoPairs = List()
-
+
// register with the auto-clearing cache manager
perRunCaches.recordCache(this)
@@ -138,7 +138,7 @@ trait Types extends api.Types { self: SymbolTable =>
private[reflect] def record(tv: TypeVar) = {
log ::= ((tv, tv.constr.cloneInternal))
}
-
+
private[scala] def clear() {
if (settings.debug.value)
self.log("Clearing " + log.size + " entries from the undoLog.")
@@ -263,6 +263,7 @@ trait Types extends api.Types { self: SymbolTable =>
def declarations = decls
def typeArguments = typeArgs
def erasedType = transformedType(this)
+ def substituteTypes(from: List[Symbol], to: List[Type]): Type = subst(from, to)
}
/** The base class for all types */
@@ -434,7 +435,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** For a typeref, its arguments. The empty list for all other types */
def typeArgs: List[Type] = List()
-
+
/** A list of placeholder types derived from the type parameters.
* Used by RefinedType and TypeRef.
*/
@@ -531,7 +532,7 @@ trait Types extends api.Types { self: SymbolTable =>
* Alternatives of overloaded symbol appear in the order they are declared.
*/
def decl(name: Name): Symbol = findDecl(name, 0)
-
+
/** A list of all non-private members defined or declared in this type. */
def nonPrivateDecls: List[Symbol] = decls filter (x => !x.isPrivate) toList
@@ -572,7 +573,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def nonPrivateMember(name: Name): Symbol =
memberBasedOnName(name, BridgeAndPrivateFlags)
-
+
/** All members with the given flags, excluding bridges.
*/
def membersWithFlags(requiredFlags: Long): List[Symbol] =
@@ -597,7 +598,7 @@ trait Types extends api.Types { self: SymbolTable =>
* an OverloadedSymbol if several exist, NoSymbol if none exist */
def nonLocalMember(name: Name): Symbol =
memberBasedOnName(name, BridgeFlags | LOCAL)
-
+
/** Members excluding and requiring the given flags.
* Note: unfortunately it doesn't work to exclude DEFERRED this way.
*/
@@ -686,7 +687,7 @@ trait Types extends api.Types { self: SymbolTable =>
* symbol.
*/
def substSym(from: List[Symbol], to: List[Symbol]): Type =
- if (from eq to) this
+ if ((from eq to) || from.isEmpty) this
else new SubstSymMap(from, to) apply this
/** Substitute all occurrences of `ThisType(from)` in this type by `to`.
@@ -899,7 +900,7 @@ trait Types extends api.Types { self: SymbolTable =>
def toLongString = {
val str = toString
if (str == "type") widen.toString
- else if (str endsWith ".type") str + " (with underlying type " + widen + ")"
+ else if ((str endsWith ".type") && !typeSymbol.isModuleClass) str + " (with underlying type " + widen + ")"
else str
}
@@ -1124,7 +1125,14 @@ trait Types extends api.Types { self: SymbolTable =>
underlying.baseTypeSeq prepend this
}
override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
- override def safeToString: String = prefixString + "type"
+ override def safeToString: String = {
+ // Avoid printing Predef.type and scala.package.type as "type",
+ // since in all other cases we omit those prefixes.
+ val pre = underlying.typeSymbol.skipPackageObject
+ if (pre.isOmittablePrefix) pre.fullName + ".type"
+ else prefixString + "type"
+ }
+
/*
override def typeOfThis: Type = typeSymbol.typeOfThis
override def bounds: TypeBounds = TypeBounds(this, this)
@@ -1243,7 +1251,7 @@ trait Types extends api.Types { self: SymbolTable =>
private[reflect] var underlyingPeriod = NoPeriod
override def underlying: Type = {
val cache = underlyingCache
- if (underlyingPeriod == currentPeriod && cache != null) cache
+ if (underlyingPeriod == currentPeriod && cache != null) cache
else {
defineUnderlyingOfSingleType(this)
underlyingCache
@@ -1286,7 +1294,7 @@ trait Types extends api.Types { self: SymbolTable =>
unique(new UniqueSingleType(pre, sym))
}
}
-
+
protected def defineUnderlyingOfSingleType(tpe: SingleType) = {
val period = tpe.underlyingPeriod
if (period != currentPeriod) {
@@ -1356,13 +1364,13 @@ trait Types extends api.Types { self: SymbolTable =>
override def baseTypeSeq: BaseTypeSeq = {
val cached = baseTypeSeqCache
- if (baseTypeSeqPeriod == currentPeriod && cached != null && cached != undetBaseTypeSeq)
+ if (baseTypeSeqPeriod == currentPeriod && cached != null && cached != undetBaseTypeSeq)
cached
else {
defineBaseTypeSeqOfCompoundType(this)
if (baseTypeSeqCache eq undetBaseTypeSeq)
throw new RecoverableCyclicReference(typeSymbol)
-
+
baseTypeSeqCache
}
}
@@ -1376,7 +1384,7 @@ trait Types extends api.Types { self: SymbolTable =>
defineBaseClassesOfCompoundType(this)
if (baseClassesCache eq null)
throw new RecoverableCyclicReference(typeSymbol)
-
+
baseClassesCache
}
}
@@ -1422,7 +1430,7 @@ trait Types extends api.Types { self: SymbolTable =>
decls.mkString("{", "; ", "}") else "")
)
}
-
+
protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = {
val period = tpe.baseTypeSeqPeriod
if (period != currentPeriod) {
@@ -1475,7 +1483,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (tpe.baseTypeSeqCache eq undetBaseTypeSeq)
throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol)
}
-
+
protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = {
def computeBaseClasses: List[Symbol] =
if (tpe.parents.isEmpty) List(tpe.typeSymbol)
@@ -1757,7 +1765,7 @@ trait Types extends api.Types { self: SymbolTable =>
// override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull;
override def kind = "ClassInfoType"
-
+
override def safeToString =
if (settings.debug.value || decls.size > 1)
formattedToString
@@ -1807,13 +1815,13 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
+ /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
* with synchronized, because they are accessed only from isVolatile, which is called only from
* Typer.
*/
private var volatileRecursions: Int = 0
private val pendingVolatiles = new mutable.HashSet[Symbol]
-
+
class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) with UniqueType {
require(args0.nonEmpty, this)
@@ -1831,7 +1839,7 @@ trait Types extends api.Types { self: SymbolTable =>
asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args)
}
-
+
// note: does not go through typeRef. There's no need to because
// neither `pre` nor `sym` changes. And there's a performance
// advantage to calling TypeRef directly.
@@ -1846,7 +1854,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isHigherKinded = typeParams.nonEmpty
override def typeParams = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
private def isRaw = !phase.erasedTypes && isRawIfWithoutArgs(sym)
-
+
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
if (isHigherKinded) {
if (sameLength(formals intersect typeParams, typeParams))
@@ -1866,9 +1874,9 @@ trait Types extends api.Types { self: SymbolTable =>
res
}
- override def transformInfo(tp: Type): Type =
+ override def transformInfo(tp: Type): Type =
appliedType(asSeenFromOwner(tp), dummyArgs)
-
+
override def narrow =
if (sym.isModuleClass) singleType(pre, sym.sourceModule)
else super.narrow
@@ -1876,14 +1884,14 @@ trait Types extends api.Types { self: SymbolTable =>
override def typeConstructor = this
// eta-expand, subtyping relies on eta-expansion of higher-kinded types
- override protected def normalizeImpl: Type =
+ override protected def normalizeImpl: Type =
if (isHigherKinded) etaExpand else super.normalizeImpl
}
-
+
trait ClassTypeRef extends TypeRef {
// !!! There are scaladoc-created symbols arriving which violate this require.
// require(sym.isClass, sym)
-
+
override protected def normalizeImpl: Type =
if (sym.isRefinementClass) sym.info.normalize // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
else super.normalizeImpl
@@ -1892,7 +1900,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (sym == clazz) this
else transform(sym.info.baseType(clazz))
}
-
+
trait NonClassTypeRef extends TypeRef {
require(sym.isNonClassType, sym)
@@ -1911,11 +1919,11 @@ trait Types extends api.Types { self: SymbolTable =>
}
relativeInfoCache
}
-
+
override def baseType(clazz: Symbol): Type =
if (sym == clazz) this else baseTypeOfNonClassTypeRef(this, clazz)
}
-
+
protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) = try {
basetypeRecursions += 1
if (basetypeRecursions < LogPendingBaseTypesThreshold)
@@ -1932,7 +1940,7 @@ trait Types extends api.Types { self: SymbolTable =>
} finally {
basetypeRecursions -= 1
}
-
+
trait AliasTypeRef extends NonClassTypeRef {
require(sym.isAliasType, sym)
@@ -1950,7 +1958,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (typeParamsMatchArgs) betaReduce.normalize
else if (isHigherKinded) super.normalizeImpl
else ErrorType
-
+
// isHKSubType0 introduces synthetic type params so that
// betaReduce can first apply sym.info to typeArgs before calling
// asSeenFrom. asSeenFrom then skips synthetic type params, which
@@ -1960,7 +1968,7 @@ trait Types extends api.Types { self: SymbolTable =>
// this crashes pos/depmet_implicit_tpbetareduce.scala
// appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
def betaReduce = transform(sym.info.resultType)
-
+
// #3731: return sym1 for which holds: pre bound sym.name to sym and
// pre1 now binds sym.name to sym1, conceptually exactly the same
// symbol as sym. The selection of sym on pre must be updated to the
@@ -1974,12 +1982,12 @@ trait Types extends api.Types { self: SymbolTable =>
// TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
case _ => sym
}
-
+
}
trait AbstractTypeRef extends NonClassTypeRef {
require(sym.isAbstractType, sym)
-
+
/** Syncnote: Pure performance caches; no need to synchronize in multi-threaded environment
*/
private var symInfoCache: Type = _
@@ -2008,7 +2016,7 @@ trait Types extends api.Types { self: SymbolTable =>
volatileRecursions -= 1
}
}
-
+
override def thisInfo = {
val symInfo = sym.info
if (thisInfoCache == null || (symInfo ne symInfoCache)) {
@@ -2041,7 +2049,7 @@ trait Types extends api.Types { self: SymbolTable =>
private[reflect] var parentsPeriod = NoPeriod
private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
private[reflect] var baseTypeSeqPeriod = NoPeriod
- private var normalized: Type = _
+ private var normalized: Type = _
// @M: propagate actual type params (args) to `tp`, by replacing
// formal type parameters with actual ones. If tp is higher kinded,
@@ -2063,7 +2071,7 @@ trait Types extends api.Types { self: SymbolTable =>
normalized
}
}
-
+
def etaExpand: Type = {
// must initialise symbol, see test/files/pos/ticket0137.scala
val tpars = initializedTypeParams
@@ -2117,12 +2125,12 @@ trait Types extends api.Types { self: SymbolTable =>
}
thisInfo.decls
}
-
+
protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform
override def baseTypeSeq: BaseTypeSeq = {
val cache = baseTypeSeqCache
- if (baseTypeSeqPeriod == currentPeriod && cache != null && cache != undetBaseTypeSeq)
+ if (baseTypeSeqPeriod == currentPeriod && cache != null && cache != undetBaseTypeSeq)
cache
else {
defineBaseTypeSeqOfTypeRef(this)
@@ -2216,7 +2224,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
})
}
-
+
protected def defineParentsOfTypeRef(tpe: TypeRef) = {
val period = tpe.parentsPeriod
if (period != currentPeriod) {
@@ -2228,7 +2236,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
}
-
+
protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) = {
val period = tpe.baseTypeSeqPeriod
if (period != currentPeriod) {
@@ -2388,7 +2396,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
object PolyType extends PolyTypeExtractor
-
+
/** A creator for existential types which flattens nested existentials.
*/
def newExistentialType(quantified: List[Symbol], underlying: Type): Type =
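
A hedged sketch of what flattening nested existentials means in practice; the shapes below are illustrative, not compiler output:

    // Naive construction would nest the quantifiers:
    //   (T forSome { type T }) forSome { type U }
    // newExistentialType merges the clauses instead:
    //   T forSome { type T; type U }
    // so repeated abstraction never stacks forSome blocks.
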
@@ -2439,25 +2447,37 @@ trait Types extends api.Types { self: SymbolTable =>
case _ =>
List()
}
-
+ /** An existential can only be printed with wildcards if:
+ * - the underlying type is a typeref
+ * - where there is a 1-to-1 correspondence between underlying's typeargs and quantified
+ * - and none of the existential parameters are referenced from anywhere else in the type
+ * - and none of the existential parameters are singleton types
+ */
+ private def isRepresentableWithWildcards = !settings.debug.value && {
+ val qset = quantified.toSet
+ !qset.exists(_.isSingletonExistential) && (underlying match {
+ case TypeRef(_, sym, args) =>
+ sameLength(args, quantified) && {
+ args forall { arg =>
+ qset(arg.typeSymbol) && !qset.exists(arg.typeSymbol.info.bounds contains _)
+ }
+ }
+ case _ => false
+ })
+ }
override def safeToString: String = {
- if (!(quantified exists (_.isSingletonExistential)) && !settings.debug.value)
- // try to represent with wildcards first
- underlying match {
- case TypeRef(pre, sym, args) if args.nonEmpty =>
- val wargs = wildcardArgsString(quantified.toSet, args)
- if (sameLength(wargs, args))
- return TypeRef(pre, sym, List()) + wargs.mkString("[", ", ", "]")
- case _ =>
- }
- var ustr = underlying.toString
+ def clauses = {
+ val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
+ if (settings.explaintypes.value) "(" + str + ")" else str
+ }
underlying match {
- case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) => ustr = "("+ustr+")"
+ case TypeRef(pre, sym, args) if isRepresentableWithWildcards =>
+ "" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]")
+ case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
+ "(" + underlying + ")" + clauses
case _ =>
+ "" + underlying + clauses
}
- val str =
- ustr+(quantified map (_.existentialToString) mkString(" forSome { ", "; ", " }"))
- if (settings.explaintypes.value) "("+str+")" else str
}
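
Two hedged examples of the printing rules encoded by isRepresentableWithWildcards (illustrative shapes, not captured output):

    // One-to-one args/quantified with no other references: wildcards are safe.
    //   Set[T] forSome { type T <: AnyRef }   prints as   Set[_ <: AnyRef]
    // A quantified symbol used twice cannot become a wildcard, so the
    // forSome clause is preserved:
    //   Map[T, T] forSome { type T }          prints as   Map[T,T] forSome { type T }
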
override def cloneInfo(owner: Symbol) =
@@ -2591,7 +2611,7 @@ trait Types extends api.Types { self: SymbolTable =>
else if (args.isEmpty) new HKTypeVar(origin, constr, params)
else throw new Error("Invalid TypeVar construction: " + ((origin, constr, args, params)))
)
-
+
trace("create", "In " + tv.originLocation)(tv)
}
}
@@ -2632,7 +2652,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isHigherKinded = true
override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName)
}
-
+
/** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.)
*/
class AppliedTypeVar(
@@ -2640,17 +2660,17 @@ trait Types extends api.Types { self: SymbolTable =>
_constr: TypeConstraint,
zippedArgs: List[(Symbol, Type)]
) extends TypeVar(_origin, _constr) {
-
+
require(zippedArgs.nonEmpty, this)
override def params: List[Symbol] = zippedArgs map (_._1)
override def typeArgs: List[Type] = zippedArgs map (_._2)
-
+
override protected def typeVarString = (
zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]")
)
}
-
+
/** A class representing a type variable: not used after phase `typer`.
*
* A higher-kinded TypeVar has params (Symbols) and typeArgs (Types).
@@ -2668,7 +2688,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def typeArgs: List[Type] = Nil
override def isHigherKinded = false
- /** The constraint associated with the variable
+ /** The constraint associated with the variable
* Syncnote: Type variables are assumed to be used from only one
* thread. They are not exposed in api.Types and are used only locally
* in operations that are exposed from types. Hence, no syncing of `constr`
@@ -2679,7 +2699,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** The variable's skolemization level */
val level = skolemizationLevel
-
+
/** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
* ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`.
*
@@ -2710,7 +2730,7 @@ trait Types extends api.Types { self: SymbolTable =>
// inference may generate several TypeVar's for a single type parameter that must be inferred,
// only one of them is in the set of tvars that need to be solved, but
// they share the same TypeConstraint instance
-
+
// When comparing to types containing skolems, remember the highest level
// of skolemization. If that highest level is higher than our initial
// skolemizationLevel, we can't re-use those skolems as the solution of this
@@ -2934,7 +2954,7 @@ trait Types extends api.Types { self: SymbolTable =>
def originLocation = {
val sym = origin.typeSymbolDirect
val encl = sym.owner.logicallyEnclosingMember
-
+
// This should display somewhere between one and three
// things which enclose the origin: at most, a class,
// a method, and a term. At least, a class.
@@ -3278,6 +3298,25 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => abort(debugString(tycon))
}
+ /** A creator for existential types where the type arguments,
+ * rather than being applied directly, are interpreted as the
+ * upper bounds of unknown types. For instance, if the type argument
+ * list given is List(AnyRefClass), the resulting type would be
+ * e.g. Set[_ <: AnyRef] rather than Set[AnyRef].
+ */
+ def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = {
+ tycon match {
+ case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) =>
+ val eparams = typeParamsToExistentials(sym)
+ val bounds = args map (TypeBounds upper _)
+ (eparams, bounds).zipped foreach (_ setInfo _)
+
+ newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe)))
+ case _ =>
+ appliedType(tycon, args)
+ }
+ }
+
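
A usage sketch, assuming SetClass denotes a unary type constructor such as scala.collection.Set:

    val tp = appliedTypeAsUpperBounds(SetClass.typeConstructor, List(AnyRefClass.tpe))
    // tp is Set[_ <: AnyRef], i.e. Set[X] forSome { type X <: AnyRef },
    // whereas plain appliedType(tycon, args) would have produced Set[AnyRef].
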
/** A creator and extractor for type parameterizations that strips empty type parameter lists.
* Use this factory method to indicate the type has kind * (it's a polymorphic value)
* until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty).
@@ -3382,7 +3421,7 @@ trait Types extends api.Types { self: SymbolTable =>
mapOver(tp)
}
}
-
+
/** Type with all top-level occurrences of abstract types replaced by their bounds */
def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala )
case TypeRef(_, sym, _) if sym.isAbstractType =>
@@ -3492,7 +3531,7 @@ trait Types extends api.Types { self: SymbolTable =>
def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
def this() = this(List(), List())
-
+
/* Syncnote: Type constraints are assumed to be used from only one
* thread. They are not exposed in api.Types and are used only locally
* in operations that are exposed from types. Hence, no syncing of any
@@ -3566,7 +3605,7 @@ trait Types extends api.Types { self: SymbolTable =>
val hi = hiBounds filterNot (_.typeSymbolDirect eq AnyClass)
val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
-
+
lostr ++ histr mkString ("[", " | ", "]")
}
if (inst eq NoType) boundsStr
@@ -3592,7 +3631,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def variance = _variance
def variance_=(x: Int) = _variance = x
-
+
override protected def noChangeToSymbols(origSyms: List[Symbol]) = {
origSyms forall { sym =>
val v = variance
@@ -3759,7 +3798,7 @@ trait Types extends api.Types { self: SymbolTable =>
protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
args mapConserve this
-
+
/** Called by mapOver to determine whether the original symbols can
* be returned, or whether they must be cloned. Overridden in VariantTypeMap.
*/
@@ -3773,7 +3812,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (elems1 eq elems) scope
else newScopeWith(elems1: _*)
}
-
+
/** Map this function over given list of symbols */
def mapOver(origSyms: List[Symbol]): List[Symbol] = {
// fast path in case nothing changes due to map
@@ -3836,7 +3875,7 @@ trait Types extends api.Types { self: SymbolTable =>
def traverse(tp: Type): Unit
def apply(tp: Type): Type = { traverse(tp); tp }
}
-
+
abstract class TypeTraverserWithResult[T] extends TypeTraverser {
def result: T
def clear(): Unit
@@ -3856,13 +3895,13 @@ trait Types extends api.Types { self: SymbolTable =>
*/
// class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) with VariantTypeMap {
// variance = v
- //
+ //
// def traverse(tp: Type) = tp match {
// case ExistentialType(_, _) if (variance == v) => result = true
// case _ => mapOver(tp)
// }
// }
- //
+ //
// val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
// val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
@@ -3872,6 +3911,8 @@ trait Types extends api.Types { self: SymbolTable =>
eparams map (_ substInfo (tparams, eparams))
}
+ def typeParamsToExistentials(clazz: Symbol): List[Symbol] =
+ typeParamsToExistentials(clazz, clazz.typeParams)
// note: it's important to write the two tests in this order,
// as only typeParams forces the classfile to be read. See #400
@@ -3897,26 +3938,21 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def rawToExistential = new TypeMap {
private var expanded = immutable.Set[Symbol]()
- private var generated = immutable.Set[Type]()
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
if (expanded contains sym) AnyRefClass.tpe
else try {
expanded += sym
- val eparams = mapOver(typeParamsToExistentials(sym, sym.typeParams))
+ val eparams = mapOver(typeParamsToExistentials(sym))
existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
} finally {
expanded -= sym
}
- case ExistentialType(_, _) if !(generated contains tp) => // to avoid infinite expansions. todo: not sure whether this is needed
- val result = mapOver(tp)
- generated += result
- result
case _ =>
mapOver(tp)
}
}
-
+
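
For illustration, the intended effect of rawToExistential on a raw Java type (a hedged example, using java.util.List):

    //   java.util.List                        -- raw reference, no type args
    // becomes
    //   java.util.List[T] forSome { type T }
    // The `expanded` set breaks self-referential bounds such as
    // java.lang.Enum[E <: Enum[E]], which degrade to AnyRef rather than looping.
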
/** Used by existentialAbstraction.
*/
class ExistentialExtrapolation(tparams: List[Symbol]) extends VariantTypeMap {
@@ -3934,10 +3970,10 @@ trait Types extends api.Types { self: SymbolTable =>
countOccs(tpe)
for (tparam <- tparams)
countOccs(tparam.info)
-
+
apply(tpe)
}
-
+
def apply(tp: Type): Type = {
val tp1 = mapOver(tp)
if (variance == 0) tp1
@@ -4313,83 +4349,83 @@ trait Types extends api.Types { self: SymbolTable =>
else mapOver(tp)
}
- class InstantiateDependentMap(params: List[Symbol], actuals: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
- private val actualsIndexed = actuals.toIndexedSeq
+ class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
+ private val actuals = actuals0.toIndexedSeq
+ private val existentials = new Array[Symbol](actuals.size)
+ def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
- object ParamWithActual {
- def unapply(sym: Symbol): Option[Type] = {
- val pid = params indexOf sym
- if(pid != -1) Some(actualsIndexed(pid)) else None
- }
+ private object StableArg {
+ def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
+ tp.isStable && (tp.typeSymbol != NothingClass)
+ )
+ }
+ private object Arg {
+ def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0)
}
- def apply(tp: Type): Type =
- mapOver(tp) match {
- case SingleType(NoPrefix, ParamWithActual(arg)) if arg.isStable => arg // unsound to replace args by unstable actual #3873
- // (soundly) expand type alias selections on implicit arguments, see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
- case tp1@TypeRef(SingleType(NoPrefix, ParamWithActual(arg)), sym, targs) =>
- val res = typeRef(arg, sym, targs)
- if(res.typeSymbolDirect isAliasType) res.dealias
- else tp1
- case tp1 => tp1 // don't return the original `tp`, which may be different from `tp1`, due to dropping annotations
- }
-
- def existentialsNeeded: List[Symbol] = existSyms.filter(_ ne null).toList
-
- private val existSyms: Array[Symbol] = new Array(actualsIndexed.size)
- private def haveExistential(i: Int) = {assert((i >= 0) && (i <= actualsIndexed.size)); existSyms(i) ne null}
+ def apply(tp: Type): Type = mapOver(tp) match {
+ // unsound to replace args by unstable actual #3873
+ case SingleType(NoPrefix, StableArg(arg)) => arg
+ // (soundly) expand type alias selections on implicit arguments,
+ // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
+ case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
+ val arg = actuals(pid)
+ val res = typeRef(arg, sym, targs)
+ if (res.typeSymbolDirect.isAliasType) res.dealias else tp1
+ // don't return the original `tp`, which may be different from `tp1`,
+ // due to dropping annotations
+ case tp1 => tp1
+ }
/* Return the type symbol for referencing a parameter inside the existential quantifier.
* (Only needed if the actual is unstable.)
*/
- def existSymFor(actualIdx: Int) =
- if (haveExistential(actualIdx)) existSyms(actualIdx)
- else {
- val oldSym = params(actualIdx)
- val symowner = oldSym.owner
- val bound = singletonBounds(actualsIndexed(actualIdx))
-
- val sym = symowner.newExistential(newTypeName(oldSym.name + ".type"), oldSym.pos)
- sym.setInfo(bound)
- sym.setFlag(oldSym.flags)
-
- existSyms(actualIdx) = sym
- sym
+ private def existentialFor(pid: Int) = {
+ if (existentials(pid) eq null) {
+ val param = params(pid)
+ existentials(pid) = (
+ param.owner.newExistential(newTypeName(param.name + ".type"), param.pos, param.flags)
+ setInfo singletonBounds(actuals(pid))
+ )
}
+ existentials(pid)
+ }
//AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
+ // TODO: this should be simplified; in the stable case, one can
+ // probably just use an Ident to the tree.symbol.
+ //
+ // @PP: That leads to failure here, where stuff no longer has type
+ // 'String @Annot("stuff")' but 'String @Annot(x)'.
+ //
+ // def m(x: String): String @Annot(x) = x
+ // val stuff = m("stuff")
+ //
+ // (TODO cont.) Why an existential in the non-stable case?
+ //
+ // @PP: In the following:
+ //
+ // def m = { val x = "three" ; val y: String @Annot(x) = x; y }
+ //
+ // m is typed as 'String @Annot(x) forSome { val x: String }'.
+ //
+ // Both examples are from run/constrained-types.scala.
object treeTrans extends Transformer {
- override def transform(tree: Tree): Tree = {
- tree match {
- case RefParamAt(pid) =>
- // TODO: this should be simplified; in the stable case, one can probably
- // just use an Ident to the tree.symbol. Why an existential in the non-stable case?
- val actual = actualsIndexed(pid)
- if (actual.isStable && actual.typeSymbol != NothingClass) {
- gen.mkAttributedQualifier(actualsIndexed(pid), tree.symbol)
- } else {
- val sym = existSymFor(pid)
- (Ident(sym.name)
- copyAttrs tree
- setType typeRef(NoPrefix, sym, Nil))
- }
- case _ => super.transform(tree)
- }
- }
- object RefParamAt {
- def unapply(tree: Tree): Option[Int] = tree match {
- case Ident(_) => Some(params indexOf tree.symbol) filterNot (_ == -1)
- case _ => None
- }
+ override def transform(tree: Tree): Tree = tree.symbol match {
+ case StableArg(actual) =>
+ gen.mkAttributedQualifier(actual, tree.symbol)
+ case Arg(pid) =>
+ val sym = existentialFor(pid)
+ Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil)
+ case _ =>
+ super.transform(tree)
}
}
-
- treeTrans.transform(arg)
+ treeTrans transform arg
}
}
-
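
A hedged walk-through of the two extractor cases; the dependent method `f` and the values below are hypothetical:

    def f(x: AnyRef): x.type = x
    val a: String = "abc"
    // f(a): the actual `a` is stable, so StableArg fires and x.type is
    // rewritten to a.type.
    // f(readLine()): the actual is unstable, so Arg fires and the result is
    // quantified via existentialFor:
    //   x.type forSome { val x: AnyRef }
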
object StripAnnotationsMap extends TypeMap {
def apply(tp: Type): Type = tp match {
case AnnotatedType(_, atp, _) =>
@@ -4514,12 +4550,12 @@ trait Types extends api.Types { self: SymbolTable =>
if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
}
}
-
+
protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
-
+
protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
var result: Symbol = _
-
+
def clear() { result = null }
private def register(sym: Symbol) {
@@ -4537,7 +4573,7 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => mapOver(tp)
}
}
-
+
private lazy val commonOwnerMapObj = new CommonOwnerMap
class MissingAliasControl extends ControlThrowable
@@ -4545,7 +4581,7 @@ trait Types extends api.Types { self: SymbolTable =>
class MissingTypeControl extends ControlThrowable
object adaptToNewRunMap extends TypeMap {
-
+
private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
if (phase.flatClasses) {
sym
@@ -4569,7 +4605,7 @@ trait Types extends api.Types { self: SymbolTable =>
var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true)
if (rebind0 == NoSymbol) {
if (sym.isAliasType) throw missingAliasException
- if (settings.debug.value) println(pre+"."+sym+" does no longer exist, phase = "+phase)
+ debugwarn(pre+"."+sym+" no longer exists, phase = "+phase)
throw new MissingTypeControl // For build manager and presentation compiler purposes
//assert(false, pre+"."+sym+" does no longer exist, phase = "+phase)
}
@@ -4625,7 +4661,7 @@ trait Types extends api.Types { self: SymbolTable =>
if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
tp
} else if (sym1 == NoSymbol) {
- if (settings.debug.value) println("adapt fail: "+pre+" "+pre1+" "+sym)
+ debugwarn("adapt fail: "+pre+" "+pre1+" "+sym)
tp
} else {
copyTypeRef(tp, pre1, sym1, args1)
@@ -4712,7 +4748,7 @@ trait Types extends api.Types { self: SymbolTable =>
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
assert(sym1 == sym2)
pre1 =:= pre2 &&
- forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
+ forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
//if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
if (tparam.variance == 0) arg1 =:= arg2
else if (arg1.isInstanceOf[TypeVar])
@@ -5373,9 +5409,9 @@ trait Types extends api.Types { self: SymbolTable =>
val params2 = mt2.params
val res2 = mt2.resultType
(sameLength(params1, params2) &&
+ mt1.isImplicit == mt2.isImplicit &&
matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- (res1 <:< res2.substSym(params2, params1)) &&
- mt1.isImplicit == mt2.isImplicit)
+ (res1 <:< res2.substSym(params2, params1)))
// TODO: if mt1.params.isEmpty, consider NullaryMethodType?
case _ =>
false
@@ -5485,8 +5521,8 @@ trait Types extends api.Types { self: SymbolTable =>
matchesType(tp1, res2, true)
case MethodType(_, _) =>
false
- case PolyType(tparams2, res2) =>
- tparams2.isEmpty && matchesType(tp1, res2, alwaysMatchSimple)
+ case PolyType(_, _) =>
+ false
case _ =>
alwaysMatchSimple || tp1 =:= tp2
}
@@ -5495,14 +5531,16 @@ trait Types extends api.Types { self: SymbolTable =>
tp2 match {
case mt2 @ MethodType(params2, res2) =>
// sameLength(params1, params2) was used directly as pre-screening optimization (now done by matchesQuantified -- is that ok, performancewise?)
- matchesQuantified(params1, params2, res1, res2) &&
+ mt1.isImplicit == mt2.isImplicit &&
matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- mt1.isImplicit == mt2.isImplicit
+ matchesQuantified(params1, params2, res1, res2)
case NullaryMethodType(res2) =>
if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
else matchesType(tp1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
alwaysMatchSimple && matchesType(tp1, res2, true)
+ case TypeRef(_, sym, Nil) =>
+ params1.isEmpty && sym.isModuleClass && matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
false
}
@@ -5514,13 +5552,18 @@ trait Types extends api.Types { self: SymbolTable =>
matchesType(res1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
alwaysMatchSimple && matchesType(tp1, res2, true)
+ case TypeRef(_, sym, Nil) if sym.isModuleClass =>
+ matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
matchesType(res1, tp2, alwaysMatchSimple)
}
case PolyType(tparams1, res1) =>
tp2 match {
case PolyType(tparams2, res2) =>
- matchesQuantified(tparams1, tparams2, res1, res2)
+ if ((tparams1 corresponds tparams2)(_ eq _))
+ matchesType(res1, res2, alwaysMatchSimple)
+ else
+ matchesQuantified(tparams1, tparams2, res1, res2)
case ExistentialType(_, res2) =>
alwaysMatchSimple && matchesType(tp1, res2, true)
case _ =>
@@ -5534,6 +5577,12 @@ trait Types extends api.Types { self: SymbolTable =>
if (alwaysMatchSimple) matchesType(res1, tp2, true)
else lastTry
}
+ case TypeRef(_, sym, Nil) if sym.isModuleClass =>
+ tp2 match {
+ case MethodType(Nil, res2) => matchesType(tp1, res2, alwaysMatchSimple)
+ case NullaryMethodType(res2) => matchesType(tp1, res2, alwaysMatchSimple)
+ case _ => lastTry
+ }
case _ =>
lastTry
}
@@ -5722,8 +5771,8 @@ trait Types extends api.Types { self: SymbolTable =>
val formatted = tableDef.table(transposed)
println("** Depth is " + depth + "\n" + formatted)
}
-
- /** From a list of types, find any which take type parameters
+
+ /** From a list of types, find any which take type parameters
* where the type parameter bounds contain references to any other
* types in the list (including itself.)
*
@@ -5823,7 +5872,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- val initialBTSes = ts map (_.baseTypeSeq.toList filter (_.typeSymbol.isPublic))
+ val initialBTSes = ts map (_.baseTypeSeq.toList)
if (printLubs)
printLubMatrix(ts zip initialBTSes toMap, depth)
@@ -6242,13 +6291,13 @@ trait Types extends api.Types { self: SymbolTable =>
if (ts exists (_.isNotNull)) res.notNull else res
}
-
+
/** A list of the typevars in a type. */
def typeVarsInType(tp: Type): List[TypeVar] = {
var tvs: List[TypeVar] = Nil
tp foreach {
case t: TypeVar => tvs ::= t
- case _ =>
+ case _ =>
}
tvs.reverse
}
@@ -6260,7 +6309,7 @@ trait Types extends api.Types { self: SymbolTable =>
// !!! Is it somehow guaranteed that this will not break under nesting?
// In general one has to save and restore the contents of the field...
tvs foreach (_.suspended = true)
- tvs
+ tvs
}
/** Compute lub (if `variance == 1`) or glb (if `variance == -1`) of given list
@@ -6490,5 +6539,5 @@ trait Types extends api.Types { self: SymbolTable =>
} finally {
tostringRecursions -= 1
}
-
+
}
diff --git a/src/compiler/scala/reflect/internal/pickling/UnPickler.scala b/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
index b21b33e138..34163d54f8 100644
--- a/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
@@ -846,10 +846,11 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
private val p = phase
override def complete(sym: Symbol) : Unit = try {
val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
- if (p != phase) atPhase(p) (sym setInfo tp)
- else sym setInfo tp
- if (currentRunId != definedAtRunId) sym.setInfo(adaptToNewRunMap(tp))
- } catch {
+ atPhase(p) (sym setInfo tp)
+ if (currentRunId != definedAtRunId)
+ sym.setInfo(adaptToNewRunMap(tp))
+ }
+ catch {
case e: MissingRequirementError => throw toTypeError(e)
}
override def load(sym: Symbol) { complete(sym) }
@@ -862,13 +863,12 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
override def complete(sym: Symbol) = try {
super.complete(sym)
var alias = at(j, readSymbol)
- if (alias.isOverloaded) {
- atPhase(picklerPhase) {
- alias = alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))
- }
- }
+ if (alias.isOverloaded)
+ alias = atPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
+
sym.asInstanceOf[TermSymbol].setAlias(alias)
- } catch {
+ }
+ catch {
case e: MissingRequirementError => throw toTypeError(e)
}
}
diff --git a/src/compiler/scala/reflect/internal/util/Collections.scala b/src/compiler/scala/reflect/internal/util/Collections.scala
index 94672097c4..d26a1abadb 100644
--- a/src/compiler/scala/reflect/internal/util/Collections.scala
+++ b/src/compiler/scala/reflect/internal/util/Collections.scala
@@ -64,7 +64,21 @@ trait Collections {
}
lb.toList
}
+
+ final def foreachWithIndex[A, B](xs: List[A])(f: (A, Int) => Unit) {
+ var index = 0
+ var ys = xs
+ while (!ys.isEmpty) {
+ f(ys.head, index)
+ ys = ys.tail
+ index += 1
+ }
+ }
+ @inline final def findOrElse[A](xs: TraversableOnce[A])(p: A => Boolean)(orElse: => A): A = {
+ xs find p getOrElse orElse
+ }
+
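
Hedged usage examples for the two helpers just added (pre-2.10 syntax, hence no string interpolation):

    foreachWithIndex(List("a", "b", "c")) { (s, i) =>
      println(i + ": " + s)                // prints 0: a, 1: b, 2: c
    }

    val firstEven = findOrElse(List(1, 3, 5))(_ % 2 == 0)(-1)
    // no even element, so the orElse thunk supplies -1
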
final def mapWithIndex[A, B](xs: List[A])(f: (A, Int) => B): List[B] = {
val lb = new ListBuffer[B]
var index = 0
@@ -88,7 +102,7 @@ trait Collections {
val x2 = ys2.head
if (p(x1, x2))
buf += ((x1, x2))
-
+
ys1 = ys1.tail
ys2 = ys2.tail
}
@@ -120,7 +134,7 @@ trait Collections {
while (!ys1.isEmpty && !ys2.isEmpty) {
if (f(ys1.head, ys2.head))
return true
-
+
ys1 = ys1.tail
ys2 = ys2.tail
}
@@ -132,7 +146,7 @@ trait Collections {
while (!ys1.isEmpty && !ys2.isEmpty) {
if (!f(ys1.head, ys2.head))
return false
-
+
ys1 = ys1.tail
ys2 = ys2.tail
}
@@ -145,7 +159,7 @@ trait Collections {
while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) {
if (!f(ys1.head, ys2.head, ys3.head))
return false
-
+
ys1 = ys1.tail
ys2 = ys2.tail
ys3 = ys3.tail
diff --git a/src/compiler/scala/reflect/runtime/ConversionUtil.scala b/src/compiler/scala/reflect/runtime/ConversionUtil.scala
index e75fd78590..8c32026e37 100644
--- a/src/compiler/scala/reflect/runtime/ConversionUtil.scala
+++ b/src/compiler/scala/reflect/runtime/ConversionUtil.scala
@@ -23,7 +23,7 @@ trait ConversionUtil { self: SymbolTable =>
toJavaMap(s) = j
}
- def toScala(key: J)(body: => S): S = synchronized {
+ def toScala(key: J)(body: => S): S = synchronized {
toScalaMap get key match {
case Some(v) =>
v
@@ -34,7 +34,7 @@ trait ConversionUtil { self: SymbolTable =>
}
}
- def toJava(key: S)(body: => J): J = synchronized {
+ def toJava(key: S)(body: => J): J = synchronized {
toJavaMap get key match {
case Some(v) =>
v
diff --git a/src/compiler/scala/reflect/runtime/Mirror.scala b/src/compiler/scala/reflect/runtime/Mirror.scala
index 4808326902..d3e4dd7619 100644
--- a/src/compiler/scala/reflect/runtime/Mirror.scala
+++ b/src/compiler/scala/reflect/runtime/Mirror.scala
@@ -12,28 +12,28 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe
import definitions._
- def classWithName(name: String): Symbol = {
+ def symbolForName(name: String): Symbol = {
val clazz = javaClass(name, defaultReflectiveClassLoader())
classToScala(clazz)
}
-
- def getCompanionObject(clazz: Symbol): AnyRef = {
+
+ def companionInstance(clazz: Symbol): AnyRef = {
val singleton = ReflectionUtils.singletonInstance(clazz.fullName, defaultReflectiveClassLoader())
singleton
}
-
- def getClass(obj: AnyRef): Symbol = classToScala(obj.getClass)
- def getType(obj: AnyRef): Type = typeToScala(obj.getClass)
+
+ def symbolOfInstance(obj: Any): Symbol = classToScala(obj.getClass)
+ def typeOfInstance(obj: Any): Type = typeToScala(obj.getClass)
// TODO: add getClass/getType for instances of primitive types, probably like this:
// def getClass[T <: AnyVal : Manifest](x: T): Symbol = manifest[T].getClass
- def getValue(receiver: AnyRef, field: Symbol): Any = {
+ def getValueOfField(receiver: AnyRef, field: Symbol): Any = {
fieldToJava(field).get(receiver)
}
- def setValue(receiver: AnyRef, field: Symbol, value: Any): Unit = {
+ def setValueOfField(receiver: AnyRef, field: Symbol, value: Any): Unit = {
fieldToJava(field).set(receiver, value)
}
- def invoke(receiver: AnyRef, meth: Symbol, args: Any*): Any = {
+ def invoke(receiver: AnyRef, meth: Symbol)(args: Any*): Any = {
if (meth.owner == ArrayClass) {
meth.name match {
case nme.length => return Array.getLength(receiver)
@@ -41,8 +41,8 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe
case nme.update => return Array.set(receiver, args(0).asInstanceOf[Int], args(1))
}
}
-
- val jmeth = methodToJava(meth)
+
+ val jmeth = methodToJava(meth)
jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
}
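
A hedged sketch of the renamed mirror API in use; `lengthSym` is assumed to be the Symbol of String.length, obtained elsewhere:

    val listSym = symbolForName("scala.collection.immutable.List")
    val listObj = companionInstance(listSym)       // the List companion object
    val strTpe  = typeOfInstance("abc")            // runtime type of a value

    // invoke is now curried: receiver and method first, then the arguments.
    val len = invoke("abc", lengthSym)()           // hypothetical lengthSym
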
@@ -51,7 +51,7 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe
override def typeToClass(tpe: Type): java.lang.Class[_] = typeToJavaClass(tpe)
override def symbolToClass(sym: Symbol): java.lang.Class[_] = classToJava(sym)
-
+
override def inReflexiveMirror = true
}
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
index 72adbd4004..dd806beb2a 100644
--- a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
@@ -1,22 +1,22 @@
package scala.reflect
package runtime
-trait SynchronizedOps extends internal.SymbolTable
+trait SynchronizedOps extends internal.SymbolTable
with SynchronizedSymbols
with SynchronizedTypes { self: SymbolTable =>
-
+
// Names
-
+
private lazy val nameLock = new Object
-
+
override def newTermName(s: String): TermName = nameLock.synchronized { super.newTermName(s) }
override def newTypeName(s: String): TypeName = nameLock.synchronized { super.newTypeName(s) }
-
+
// BaseTypeSeqs
-
- override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
+
+ override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
-
+
trait SynchronizedBaseTypeSeq extends BaseTypeSeq {
override def apply(i: Int): Type = synchronized { super.apply(i) }
override def rawElem(i: Int) = synchronized { super.rawElem(i) }
@@ -30,9 +30,9 @@ trait SynchronizedOps extends internal.SymbolTable
override def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
}
-
+
// Scopes
-
+
override def newScope = new Scope() with SynchronizedScope
override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
index 9baf94f71d..3f2fa30be2 100644
--- a/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -6,61 +6,61 @@ import internal.Flags.DEFERRED
trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override protected def nextId() = synchronized { super.nextId() }
-
- override protected def freshExistentialName(suffix: String) =
+
+ override protected def freshExistentialName(suffix: String) =
synchronized { super.freshExistentialName(suffix) }
// Set the fields which point companions at one another. Returns the module.
override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
synchronized { super.connectModuleToClass(m, moduleClass) }
-
+
override def newFreeVar(name: TermName, tpe: Type, value: Any, newFlags: Long = 0L): FreeVar =
new FreeVar(name, value) with SynchronizedTermSymbol initFlags newFlags setInfo tpe
override protected def makeNoSymbol = new NoSymbol with SynchronizedSymbol
-
+
trait SynchronizedSymbol extends Symbol {
-
+
override def rawowner = synchronized { super.rawowner }
override def rawname = synchronized { super.rawname }
override def rawflags = synchronized { super.rawflags }
-
+
override def rawflags_=(x: FlagsType) = synchronized { super.rawflags_=(x) }
override def name_=(x: Name) = synchronized { super.name_=(x) }
override def owner_=(owner: Symbol) = synchronized { super.owner_=(owner) }
-
+
override def validTo = synchronized { super.validTo }
override def validTo_=(x: Period) = synchronized { super.validTo_=(x) }
-
+
override def pos = synchronized { super.pos }
override def setPos(pos: Position): this.type = { synchronized { super.setPos(pos) }; this }
override def privateWithin = synchronized { super.privateWithin }
- override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) }
+ override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) }
- override def info = synchronized { super.info }
+ override def info = synchronized { super.info }
override def info_=(info: Type) = synchronized { super.info_=(info) }
- override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) }
+ override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) }
override def rawInfo: Type = synchronized { super.rawInfo }
override def typeParams: List[Symbol] = synchronized { super.typeParams }
- override def reset(completer: Type) = synchronized { super.reset(completer) }
+ override def reset(completer: Type) = synchronized { super.reset(completer) }
- override def infosString: String = synchronized { super.infosString }
+ override def infosString: String = synchronized { super.infosString }
override def annotations: List[AnnotationInfo] = synchronized { super.annotations }
- override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this }
+ override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this }
// ------ creators -------------------------------------------------------------------
override def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
-
+
override def newAbstractTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AbstractTypeSymbol =
new AbstractTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags
-
+
override def newAliasTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AliasTypeSymbol =
new AliasTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags
@@ -72,10 +72,10 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
new ClassSymbol(this, pos, name) with SynchronizedClassSymbol initFlags newFlags
-
+
override def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
new ModuleClassSymbol(this, pos, name) with SynchronizedModuleClassSymbol initFlags newFlags
-
+
override def newTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position = NoPosition, newFlags: Long = 0L): TypeSkolem =
if ((newFlags & DEFERRED) == 0L)
new TypeSkolem(this, pos, name, origin) with SynchronizedTypeSymbol initFlags newFlags
@@ -116,4 +116,4 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override def implicitMembers: List[Symbol] = synchronized { super.implicitMembers }
}
}
-
+
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala b/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala
index c842d3dd01..e5a508f802 100644
--- a/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala
@@ -2,86 +2,86 @@ package scala.reflect
package runtime
/** This trait overrides methods in reflect.internal, bracketing
- * them in synchronized { ... } to make them thread-safe
+ * them in synchronized { ... } to make them thread-safe
*/
trait SynchronizedTypes extends internal.Types { self: SymbolTable =>
-
+
// No sharing of map objects:
override protected def commonOwnerMap = new CommonOwnerMap
-
+
private val uniqueLock = new Object
override def unique[T <: Type](tp: T): T = uniqueLock.synchronized { super.unique(tp) }
-
+
class SynchronizedUndoLog extends UndoLog {
-
- override def clear() =
+
+ override def clear() =
synchronized { super.clear() }
-
+
override def undo[T](block: => T): T =
synchronized { super.undo(block) }
-
+
override def undoUnless(block: => Boolean): Boolean =
synchronized { super.undoUnless(block) }
}
-
+
override protected def newUndoLog = new SynchronizedUndoLog
-
- override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) =
+
+ override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) =
synchronized { super.baseTypeOfNonClassTypeRef(tpe, clazz) }
-
- private val subsametypeLock = new Object
-
+
+ private val subsametypeLock = new Object
+
override def isSameType(tp1: Type, tp2: Type): Boolean =
subsametypeLock.synchronized { super.isSameType(tp1, tp2) }
-
+
override def isDifferentType(tp1: Type, tp2: Type): Boolean =
subsametypeLock.synchronized { super.isDifferentType(tp1, tp2) }
-
+
override def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean =
subsametypeLock.synchronized { super.isSubType(tp1, tp2, depth) }
-
+
private val lubglbLock = new Object
-
+
override def glb(ts: List[Type]): Type =
lubglbLock.synchronized { super.glb(ts) }
-
+
override def lub(ts: List[Type]): Type =
lubglbLock.synchronized { super.lub(ts) }
-
+
private val indentLock = new Object
-
+
override protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
indentLock.synchronized { super.explain(op, p, tp1, arg2) }
}
-
+
private val toStringLock = new Object
override protected def typeToString(tpe: Type): String =
toStringLock.synchronized(super.typeToString(tpe))
-
- /* The idea of caches is as follows.
+
+ /* The idea of caches is as follows.
* When in reflexive mode, a cache is either null, or one sentinel
* value representing undefined or the final defined
* value. Hence, we can ask in non-synchronized mode whether the cache field
- * is non null and different from the sentinel (if a sentinel exists).
+ * is non null and different from the sentinel (if a sentinel exists).
* If that's true, the cache value is current.
* Otherwise we arrive in one of the defined... methods listed below
* which go through all steps in synchronized mode.
*/
-
+
override protected def defineUnderlyingOfSingleType(tpe: SingleType) =
tpe.synchronized { super.defineUnderlyingOfSingleType(tpe) }
-
- override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) =
+
+ override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) =
tpe.synchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
- override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) =
+ override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) =
tpe.synchronized { super.defineBaseClassesOfCompoundType(tpe) }
-
- override protected def defineParentsOfTypeRef(tpe: TypeRef) =
+
+ override protected def defineParentsOfTypeRef(tpe: TypeRef) =
tpe.synchronized { super.defineParentsOfTypeRef(tpe) }
-
- override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) =
+
+ override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) =
tpe.synchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
index 70a3061fc7..8cc4d5f788 100644
--- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala
+++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
@@ -44,11 +44,11 @@ trait ToolBoxes extends { self: Universe =>
// !!! Why is this in the empty package? If it's only to make
// it inaccessible then please put it somewhere designed for that
// rather than polluting the empty package with synthetics.
- trace("typing: ")(showAttributed(tree))
+ trace("typing: ")(showAttributed(tree, true, true, settings.Yshowsymkinds.value))
val ownerClass = EmptyPackageClass.newClassWithInfo(newTypeName("<expression-owner>"), List(ObjectClass.tpe), newScope)
val owner = ownerClass.newLocalDummy(tree.pos)
val ttree = typer.atOwner(tree, owner).typed(tree, analyzer.EXPRmode, pt)
- trace("typed: ")(showAttributed(ttree))
+ trace("typed: ")(showAttributed(ttree, true, true, settings.Yshowsymkinds.value))
ttree
}
@@ -64,7 +64,7 @@ trait ToolBoxes extends { self: Universe =>
obj setInfo obj.moduleClass.tpe
val meth = obj.moduleClass.newMethod(newTermName(wrapperMethodName))
def makeParam(fv: Symbol) = meth.newValueParameter(fv.name.toTermName) setInfo fv.tpe
- meth setInfo MethodType(fvs map makeParam, expr.tpe)
+ meth setInfo MethodType(fvs map makeParam, AnyClass.tpe)
minfo.decls enter meth
trace("wrapping ")(defOwner(expr) -> meth)
val methdef = DefDef(meth, expr changeOwner (defOwner(expr) -> meth))
@@ -78,9 +78,9 @@ trait ToolBoxes extends { self: Universe =>
List(List()),
List(methdef),
NoPosition))
- trace("wrapped: ")(showAttributed(moduledef))
+ trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
val cleanedUp = resetLocalAttrs(moduledef)
- trace("cleaned up: ")(showAttributed(cleanedUp))
+ trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
cleanedUp
}
@@ -94,6 +94,20 @@ trait ToolBoxes extends { self: Universe =>
}
def compileExpr(expr: Tree, fvs: List[Symbol]): String = {
+ // Previously toolboxes used to typecheck their inputs before compiling.
+ // Actually, the initial demo by Martin first typechecked the reified tree,
+ // then ran it, which typechecked it again, and only then launched the
+ // reflective compiler.
+ //
+ // However, as observed in https://issues.scala-lang.org/browse/SI-5464,
+ // typechecking in the current implementation is not always idempotent.
+ // That's why we cannot allow inputs of toolboxes to be typechecked,
+ // at least not until the aforementioned issue is closed.
+ val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree])
+ if (!typed.isEmpty) {
+ throw new Error("cannot compile trees that are already typed")
+ }
+
val mdef = wrapInObject(expr, fvs)
val pdef = wrapInPackage(mdef)
val unit = wrapInCompilationUnit(pdef)
@@ -106,7 +120,6 @@ trait ToolBoxes extends { self: Universe =>
jclazz.getDeclaredMethods.find(_.getName == name).get
def runExpr(expr: Tree): Any = {
- val etpe = expr.tpe
val fvs = (expr filter isFree map (_.symbol)).distinct
reporter.reset()
@@ -164,7 +177,13 @@ trait ToolBoxes extends { self: Universe =>
}
command.settings.outputDirs setSingleOutput virtualDirectory
- new ToolBoxGlobal(command.settings, reporter)
+ val instance = new ToolBoxGlobal(command.settings, reporter)
+
+ // need to establish a run and a phase because otherwise we run into an assertion in TypeHistory
+ // that states that the period must be different from NoPeriod
+ val run = new instance.Run
+ instance.phase = run.refchecksPhase
+ instance
}
lazy val importer = new compiler.Importer {
@@ -175,22 +194,13 @@ trait ToolBoxes extends { self: Universe =>
lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, defaultReflectiveClassLoader)
- private def importAndTypeCheck(tree: rm.Tree, expectedType: rm.Type): compiler.Tree = {
- // need to establish a run an phase because otherwise we run into an assertion in TypeHistory
- // that states that the period must be different from NoPeriod
- val run = new compiler.Run
- compiler.phase = run.refchecksPhase
+ def typeCheck(tree: rm.Tree, expectedType: rm.Type): rm.Tree = {
+ if (compiler.settings.verbose.value) println("typing "+tree+", pt = "+expectedType)
val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree])
val pt: compiler.Type = importer.importType(expectedType.asInstanceOf[Type])
-// val typer = compiler.typer.atOwner(ctree, if (owner.isModule) cowner.moduleClass else cowner)
val ttree: compiler.Tree = compiler.typedTopLevelExpr(ctree, pt)
- ttree
- }
-
- def typeCheck(tree: rm.Tree, expectedType: rm.Type): rm.Tree = {
- if (compiler.settings.verbose.value) println("typing "+tree+", pt = "+expectedType)
- val ttree = importAndTypeCheck(tree, expectedType)
- exporter.importTree(ttree).asInstanceOf[rm.Tree]
+ val rmttree = exporter.importTree(ttree).asInstanceOf[rm.Tree]
+ rmttree
}
def typeCheck(tree: rm.Tree): rm.Tree =
@@ -199,11 +209,10 @@ trait ToolBoxes extends { self: Universe =>
def showAttributed(tree: rm.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
compiler.showAttributed(importer.importTree(tree.asInstanceOf[Tree]), printTypes, printIds, printKinds)
- def runExpr(tree: rm.Tree, expectedType: rm.Type): Any = {
- val ttree = importAndTypeCheck(tree, expectedType)
- compiler.runExpr(ttree)
+ def runExpr(tree: rm.Tree): Any = {
+ if (compiler.settings.verbose.value) println("running "+tree)
+ val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree])
+ compiler.runExpr(ctree)
}
-
- def runExpr(tree: rm.Tree): Any = runExpr(tree, WildcardType.asInstanceOf[rm.Type])
}
}
diff --git a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala
index fc4177e956..61001a4778 100644
--- a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala
+++ b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala
@@ -2,10 +2,12 @@ package scala.reflect
package runtime
trait TreeBuildUtil extends Universe with api.TreeBuildUtil {
-
- def staticClass(fullname: String): Symbol = definitions.getRequiredClass(fullname)
- def staticModule(fullname: String): Symbol = definitions.getRequiredModule(fullname)
- def thisModuleType(fullname: String) = staticModule(fullname).moduleClass.thisType
+ /** A comment to the effect of why initialize was added to all these
+ * would be appreciated. (We may as well start somewhere.)
+ */
+ def staticClass(fullname: String) = definitions.getRequiredClass(fullname).initialize
+ def staticModule(fullname: String) = definitions.getRequiredModule(fullname).initialize
+ def thisModuleType(fullname: String) = staticModule(fullname).moduleClass.initialize.thisType
/** Selects type symbol with given name from the defined members of prefix type
*/
@@ -39,7 +41,7 @@ trait TreeBuildUtil extends Universe with api.TreeBuildUtil {
selectIn(owner.info, idx)
}
- def freeVar(name: String, info: Type, value: Any) = newFreeVar(newTermName(name), info, value)
+ def newFreeVar(name: String, info: Type, value: Any) = newFreeVar(newTermName(name), info, value)
def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers =
Modifiers(flags, privateWithin, annotations)
diff --git a/src/compiler/scala/reflect/runtime/Universe.scala b/src/compiler/scala/reflect/runtime/Universe.scala
index c786bb86c5..700f819226 100644
--- a/src/compiler/scala/reflect/runtime/Universe.scala
+++ b/src/compiler/scala/reflect/runtime/Universe.scala
@@ -16,7 +16,7 @@ class Universe extends SymbolTable {
val gen = new TreeGen { val global: Universe.this.type = Universe.this }
- def settings = new Settings
+ lazy val settings = new Settings
def forInteractive = false
def forScaladoc = false
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 04ff0c440d..3c79fcd3fb 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -90,7 +90,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Defines valid values for properties that refer to compiler phases. */
object CompilerPhase extends PermissibleValue {
- val values = List("namer", "typer", "pickler", "refchecks", "liftcode",
+ val values = List("namer", "typer", "pickler", "refchecks",
"uncurry", "tailcalls", "specialize", "explicitouter",
"erasure", "lazyvals", "lambdalift", "constructors",
"flatten", "mixin", "cleanup", "icode", "inliner",
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index 253d1dec5d..c92474b33e 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -43,7 +43,8 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
* - `deprecation`,
* - `docgenerator`,
* - `docrootcontent`,
- * - `unchecked`.
+ * - `unchecked`,
+ * - `nofail`.
*
* It also takes the following parameters as nested elements:
* - `src` (for srcdir),
@@ -123,6 +124,9 @@ class Scaladoc extends ScalaMatchingTask {
/** Instruct the compiler to generate unchecked information. */
private var unchecked: Boolean = false
+ /** Instruct the ant task not to fail in the event of errors */
+ private var nofail: Boolean = false
+
/*============================================================================*\
** Properties setters **
\*============================================================================*/
@@ -353,6 +357,17 @@ class Scaladoc extends ScalaMatchingTask {
docUncompilable = Some(input)
}
+ /** Set the `nofail` info attribute.
+ *
+ * @param input One of the flags `yes/no` or `on/off`. Default is no/off.
+ */
+ def setNoFail(input: String) {
+ if (Flag.isPermissible(input))
+ nofail = "yes".equals(input) || "on".equals(input)
+ else
+ buildError("Unknown nofail flag '" + input + "'")
+ }
+
/*============================================================================*\
** Properties getters **
\*============================================================================*/
@@ -553,6 +568,8 @@ class Scaladoc extends ScalaMatchingTask {
Pair(docSettings, sourceFiles)
}
+ def safeBuildError(message: String): Unit = if (nofail) log(message) else buildError(message)
+
/** Performs the compilation. */
override def execute() = {
val Pair(docSettings, sourceFiles) = initialize
@@ -561,7 +578,7 @@ class Scaladoc extends ScalaMatchingTask {
val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
docProcessor.document(sourceFiles.map (_.toString))
if (reporter.ERROR.count > 0)
- buildError(
+ safeBuildError(
"Document failed with " +
reporter.ERROR.count + " error" +
(if (reporter.ERROR.count > 1) "s" else "") +
@@ -576,11 +593,11 @@ class Scaladoc extends ScalaMatchingTask {
} catch {
case exception: Throwable if exception.getMessage ne null =>
exception.printStackTrace()
- buildError("Document failed because of an internal documenter error (" +
+ safeBuildError("Document failed because of an internal documenter error (" +
exception.getMessage + "); see the error output for details.")
case exception =>
exception.printStackTrace()
- buildError("Document failed because of an internal documenter error " +
+ safeBuildError("Document failed because of an internal documenter error " +
"(no error message provided); see the error output for details.")
}
}
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index 7e51930fa4..599936f6f8 100644
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -128,9 +128,11 @@ if [[ -z "$cygwin$mingw" ]]; then
usebootcp="true"
fi
+# If using the boot classpath, also pass an empty classpath
+# to java to suppress "." from materializing.
classpathArgs () {
if [[ -n $usebootcp ]]; then
- echo "-Xbootclasspath/a:$TOOL_CLASSPATH"
+ echo "-Xbootclasspath/a:$TOOL_CLASSPATH -classpath \"\""
else
echo "-classpath $TOOL_CLASSPATH"
fi
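
The practical effect of the hunk above, assuming usebootcp is set: the launcher now emits something along the lines of

    java -Xbootclasspath/a:$TOOL_CLASSPATH -classpath "" <main class> "$@"

(the main class placeholder is illustrative). Without the explicit empty -classpath, java would quietly add "." as the default classpath alongside the boot classpath entries.
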
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index ab4a4a4402..0869350dd3 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -30,7 +30,7 @@ trait AnyValReps {
" * @return the bitwise negation of this value\n" +
" * @example {{{\n" +
" * ~5 == -6\n" +
- " * // in binary: ~00000101 == \n" +
+ " * // in binary: ~00000101 ==\n" +
" * // 11111010\n" +
" * }}}\n" +
" */") :: ops
@@ -44,9 +44,9 @@ trait AnyValReps {
" * @return the bitwise OR of this value and x\n" +
" * @example {{{\n" +
" * (0xf0 | 0xaa) == 0xfa\n" +
- " * // in binary: 11110000 \n" +
- " * // | 10101010 \n" +
- " * // -------- \n" +
+ " * // in binary: 11110000\n" +
+ " * // | 10101010\n" +
+ " * // --------\n" +
" * // 11111010\n" +
" * }}}\n" +
" */"),
@@ -54,9 +54,9 @@ trait AnyValReps {
" * @return the bitwise AND of this value and x\n" +
" * @example {{{\n" +
" * (0xf0 & 0xaa) == 0xa0\n" +
- " * // in binary: 11110000 \n" +
- " * // & 10101010 \n" +
- " * // -------- \n" +
+ " * // in binary: 11110000\n" +
+ " * // & 10101010\n" +
+ " * // --------\n" +
" * // 10100000\n" +
" * }}}\n" +
" */"),
@@ -64,9 +64,9 @@ trait AnyValReps {
" * @return the bitwise XOR of this value and x\n" +
" * @example {{{\n" +
" * (0xf0 ^ 0xaa) == 0x5a\n" +
- " * // in binary: 11110000 \n" +
- " * // ^ 10101010 \n" +
- " * // -------- \n" +
+ " * // in binary: 11110000\n" +
+ " * // ^ 10101010\n" +
+ " * // --------\n" +
" * // 01011010\n" +
" * }}}\n" +
" */"))
@@ -83,11 +83,11 @@ trait AnyValReps {
Op(">>>", "/**\n" +
" * @return this value bit-shifted right by the specified number of bits,\n" +
- " * filling the new left bits with zeroes. \n" +
+ " * filling the new left bits with zeroes.\n" +
" * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}\n" +
" * @example {{{\n" +
- " * -21 >>> 3 == 536870909 \n" +
- " * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == \n" +
+ " * -21 >>> 3 == 536870909\n" +
+ " * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==\n" +
" * // 00011111 11111111 11111111 11111101\n" +
" * }}}\n" +
" */"),
@@ -97,8 +97,8 @@ trait AnyValReps {
" * filling in the right bits with the same value as the left-most bit of this.\n" +
" * The effect of this is to retain the sign of the value.\n" +
" * @example {{{\n" +
- " * -21 >> 3 == -3 \n" +
- " * // in binary: 11111111 11111111 11111111 11101011 >> 3 == \n" +
+ " * -21 >> 3 == -3\n" +
+ " * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==\n" +
" * // 11111111 11111111 11111111 11111101\n" +
" * }}}\n" +
" */"))
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 940d115b2f..d6f57801e7 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -74,7 +74,7 @@ trait CompilationUnits { self: Global =>
* It is empty up to phase 'icode'.
*/
val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
-
+
def echo(pos: Position, msg: String) =
reporter.echo(pos, msg)
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 8e5ca2156a..248d5d675d 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -37,6 +37,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
with Plugins
with PhaseAssembly
with Trees
+ with Reifiers
with TreePrinters
with DocComments
with MacroContext
@@ -58,7 +59,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
type AbstractFileType = scala.tools.nsc.io.AbstractFile
def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym)
-
+
def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
// platform specific elements
@@ -124,7 +125,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Print tree in detailed form */
object nodePrinters extends {
val global: Global.this.type = Global.this
- } with NodePrinters {
+ } with NodePrinters with ReifyPrinters {
infolevel = InfoLevel.Verbose
}
@@ -134,6 +135,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
} with TreeBrowsers
val nodeToString = nodePrinters.nodeToString
+ val reifiedNodeToString = nodePrinters.reifiedNodeToString
val treeBrowser = treeBrowsers.create()
// ------------ Hooks for interactive mode-------------------------
@@ -152,7 +154,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Register top level class (called on entering the class)
*/
def registerTopLevelSym(sym: Symbol) {}
-
+
// ------------------ Reporting -------------------------------------
// not deprecated yet, but a method called "error" imported into
@@ -191,10 +193,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
if (settings.debug.value)
body
}
- @inline final override def debuglog(msg: => String) {
- if (settings.debug.value && (settings.log containsPhase globalPhase))
- inform("[log " + phase + "] " + msg)
- }
// Warnings issued only under -Ydebug. For messages which should reach
// developer ears, but are not adequately actionable by users.
@inline final override def debugwarn(msg: => String) {
@@ -211,10 +209,29 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
def logError(msg: String, t: Throwable): Unit = ()
+
+ def logAfterEveryPhase[T](msg: String)(op: => T) {
+ log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op))
+ }
+
+ def shouldLogAtThisPhase = (
+ (settings.log.isSetByUser)
+ && ((settings.log containsPhase globalPhase) || (settings.log containsPhase phase))
+ )
+ def atPhaseStackMessage = atPhaseStack match {
+ case Nil => ""
+ case ps => ps.reverseMap("->" + _).mkString("(", " ", ")")
+ }
// Over 200 closure objects are eliminated by inlining this.
- @inline final def log(msg: => AnyRef): Unit =
- if (settings.log containsPhase globalPhase)
- inform("[log " + phase + "] " + msg)
+ @inline final def log(msg: => AnyRef) {
+ if (shouldLogAtThisPhase)
+ inform("[log %s%s] %s".format(globalPhase, atPhaseStackMessage, msg))
+ }
+
+ @inline final override def debuglog(msg: => String) {
+ if (settings.debug.value)
+ log(msg)
+ }
def logThrowable(t: Throwable): Unit = globalError(throwableAsString(t))
def throwableAsString(t: Throwable): String =
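
A runnable toy model of the new log gating; the names mirror the hunk, but the stubs (logPhases, the Ph case class) are hypothetical. The point: -Ylog:<phase> now matches either the run's current globalPhase or any phase temporarily entered via atPhase.

    // Hypothetical model of shouldLogAtThisPhase; stubs replace settings.
    object LogGatingSketch {
      case class Ph(name: String)
      val logPhases   = Set("erasure")      // stands in for settings.log
      val globalPhase = Ph("typer")         // where the compiler run is
      val phase       = Ph("erasure")       // entered via atPhase(...)

      def shouldLogAtThisPhase =
        logPhases(globalPhase.name) || logPhases(phase.name)

      def log(msg: => AnyRef): Unit =
        if (shouldLogAtThisPhase)
          println("[log " + globalPhase.name + "] " + msg)  // fires: phase matches

      def main(args: Array[String]): Unit = log("recomputing symbol info")
    }
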
@@ -463,17 +480,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
val runsRightAfter = None
} with RefChecks
- // phaseName = "liftcode"
- object liftcode extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("refchecks")
- val runsRightAfter = None
- } with LiftCode
-
// phaseName = "uncurry"
override object uncurry extends {
val global: Global.this.type = Global.this
- val runsAfter = List("refchecks", "liftcode")
+ val runsAfter = List[String]("refchecks")
val runsRightAfter = None
} with UnCurry
@@ -659,7 +669,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
extensionMethods -> "add extension methods for inline classes",
pickler -> "serialize symbol tables",
refChecks -> "reference/override checking, translate nested objects",
- liftcode -> "reify trees",
uncurry -> "uncurry, translate function values to anonymous classes",
tailCalls -> "replace tail calls by jumps",
specializeTypes -> "@specialized-driven class and method specialization",
@@ -709,18 +718,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
private lazy val unitTimings = mutable.HashMap[CompilationUnit, Long]() withDefaultValue 0L // tracking time spent per unit
private def unitTimingsFormatted(): String = {
def toMillis(nanos: Long) = "%.3f" format nanos / 1000000d
-
+
val formatter = new util.TableDef[(String, String)] {
>> ("ms" -> (_._1)) >+ " "
<< ("path" -> (_._2))
}
"" + (
- new formatter.Table(unitTimings.toList sortBy (-_._2) map {
+ new formatter.Table(unitTimings.toList sortBy (-_._2) map {
case (unit, nanos) => (toMillis(nanos), unit.source.path)
})
)
}
-
+
protected def addToPhasesSet(sub: SubComponent, descr: String) {
phasesSet += sub
phasesDescMap(sub) = descr
@@ -767,6 +776,51 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
line1 :: line2 :: descs mkString
}
+ /** Returns List of (phase, value) pairs, including only those
+ * where the value compares unequal to the previous phase's value.
+ */
+ def afterEachPhase[T](op: => T): List[(Phase, T)] = {
+ phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) =>
+ val value = afterPhase(ph)(op)
+ if (res.nonEmpty && res.head._2 == value) res
+ else ((ph, value)) :: res
+ } reverse
+ }
+
+ /** Returns List of ChangeAfterPhase objects, encapsulating those
+ * phase transitions where the result of the operation gave a different
+ * list than it had when run during the previous phase.
+ */
+ def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = {
+ val ops = ((NoPhase, Nil)) :: afterEachPhase(op)
+
+ ops sliding 2 map {
+ case (_, before) :: (ph, after) :: Nil =>
+ val lost = before filterNot (after contains _)
+ val gained = after filterNot (before contains _)
+ ChangeAfterPhase(ph, lost, gained)
+ case _ => ???
+ } toList
+ }
+ private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name)
+
+ case class ChangeAfterPhase[+T](ph: Phase, lost: List[T], gained: List[T]) {
+ private def mkStr(what: String, xs: List[_]) = (
+ if (xs.isEmpty) ""
+ else xs.mkString(what + " after " + numberedPhase(ph) + " {\n ", "\n ", "\n}\n")
+ )
+ override def toString = mkStr("Lost", lost) + mkStr("Gained", gained)
+ }
+
+ def describeAfterEachPhase[T](op: => T): List[String] =
+ afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) }
+
+ def describeAfterEveryPhase[T](op: => T): String =
+ describeAfterEachPhase(op) map (" " + _ + "\n") mkString
+
+ def printAfterEachPhase[T](op: => T): Unit =
+ describeAfterEachPhase(op) foreach (m => println(" " + m))
+
// ----------- Runs ---------------------------------------
private var curRun: Run = null
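
A hedged usage sketch for the new per-phase diagnostics; printAfterEachPhase and logAfterEveryPhase are the entry points defined above, while someClassSym is a made-up symbol:

    // Hypothetical: report a class's declarations once per phase,
    // skipping phases where nothing changed (afterEachPhase drops
    // values equal to the previous phase's result).
    printAfterEachPhase(someClassSym.info.decls.toList map (_.defString))

    // Or route the same observation through the logger:
    logAfterEveryPhase("decls of someClassSym")(someClassSym.info.decls.size)
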
@@ -821,6 +875,28 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
+ // TODO - trim these to the absolute minimum.
+ @inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op)
+ @inline final def afterExplicitOuter[T](op: => T): T = afterPhase(currentRun.explicitouterPhase)(op)
+ @inline final def afterFlatten[T](op: => T): T = afterPhase(currentRun.flattenPhase)(op)
+ @inline final def afterIcode[T](op: => T): T = afterPhase(currentRun.icodePhase)(op)
+ @inline final def afterMixin[T](op: => T): T = afterPhase(currentRun.mixinPhase)(op)
+ @inline final def afterPickler[T](op: => T): T = afterPhase(currentRun.picklerPhase)(op)
+ @inline final def afterRefchecks[T](op: => T): T = afterPhase(currentRun.refchecksPhase)(op)
+ @inline final def afterSpecialize[T](op: => T): T = afterPhase(currentRun.specializePhase)(op)
+ @inline final def afterTyper[T](op: => T): T = afterPhase(currentRun.typerPhase)(op)
+ @inline final def afterUncurry[T](op: => T): T = afterPhase(currentRun.uncurryPhase)(op)
+ @inline final def beforeErasure[T](op: => T): T = beforePhase(currentRun.erasurePhase)(op)
+ @inline final def beforeExplicitOuter[T](op: => T): T = beforePhase(currentRun.explicitouterPhase)(op)
+ @inline final def beforeFlatten[T](op: => T): T = beforePhase(currentRun.flattenPhase)(op)
+ @inline final def beforeIcode[T](op: => T): T = beforePhase(currentRun.icodePhase)(op)
+ @inline final def beforeMixin[T](op: => T): T = beforePhase(currentRun.mixinPhase)(op)
+ @inline final def beforePickler[T](op: => T): T = beforePhase(currentRun.picklerPhase)(op)
+ @inline final def beforeRefchecks[T](op: => T): T = beforePhase(currentRun.refchecksPhase)(op)
+ @inline final def beforeSpecialize[T](op: => T): T = beforePhase(currentRun.specializePhase)(op)
+ @inline final def beforeTyper[T](op: => T): T = beforePhase(currentRun.typerPhase)(op)
+ @inline final def beforeUncurry[T](op: => T): T = beforePhase(currentRun.uncurryPhase)(op)
+
/** Don't want to introduce new errors trying to report errors,
* so swallow exceptions.
*/
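
Typical call sites for these wrappers, sketched with a made-up symbol; each one is shorthand for afterPhase(currentRun.<name>Phase)(op) or its beforePhase twin:

    // Hypothetical: inspect the same symbol at different points of the run.
    val generic = beforeErasure(sym.tpe)   // type before erasure has run
    val erased  = afterErasure(sym.tpe)    // type as erasure left it
    val flat    = afterFlatten(sym.owner)  // owner once nesting is flattened
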
@@ -867,7 +943,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Counts for certain classes of warnings during this run. */
var deprecationWarnings: List[(Position, String)] = Nil
var uncheckedWarnings: List[(Position, String)] = Nil
-
+
/** A flag whether macro expansions failed */
var macroExpansionFailed = false
@@ -928,16 +1004,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
// Each subcomponent supplies a phase, which are chained together.
// If -Ystop:phase is given, neither that phase nor any beyond it is added.
// If -Yskip:phase is given, that phase will be skipped.
- val lastPhase = phaseDescriptors.tail .
- takeWhile (pd => !stopPhase(pd.phaseName)) .
- filterNot (pd => skipPhase(pd.phaseName)) .
- foldLeft (parserPhase) ((chain, ph) => ph newPhase chain)
-
- // Ensure there is a terminal phase at the end, since -Ystop may have limited the phases.
- terminalPhase =
- if (lastPhase.name == "terminal") lastPhase
- else terminal newPhase lastPhase
-
+ val phaseLinks = {
+ val phs = (
+ phaseDescriptors.tail
+ takeWhile (pd => !stopPhase(pd.phaseName))
+ filterNot (pd => skipPhase(pd.phaseName))
+ )
+ // Ensure there is a terminal phase at the end, since -Ystop may have limited the phases.
+ if (phs.isEmpty || (phs.last ne terminal)) phs :+ terminal
+ else phs
+ }
+ // Link them together.
+ phaseLinks.foldLeft(parserPhase)((chain, ph) => ph newPhase chain)
parserPhase
}
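
One subtlety worth spelling out: ph newPhase chain constructs ph's phase with chain as its predecessor, and (assuming nsc's Phase constructor, which wires prev.next to the new phase) the fold's return value can be discarded, since every phase is reachable from parserPhase via next. A runnable toy model under that assumption:

    // Toy model: constructing a phase with a predecessor also sets the
    // predecessor's next pointer; next defaults to self (terminal test).
    class P(val name: String, prev: P) {
      var next: P = this
      if (prev != null) prev.next = this
    }
    val parser = new P("parser", null)
    List("namer", "typer", "terminal").foldLeft(parser)((chain, n) => new P(n, chain))
    // parser.next.name == "namer"; the fold's result itself is unused.
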
@@ -1014,38 +1092,46 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
}
def cancel() { reporter.cancelled = true }
-
+
private def currentProgress = (phasec * size) + unitc
private def totalProgress = (phaseDescriptors.size - 1) * size // -1: drops terminal phase
private def refreshProgress() = if (size > 0) progress(currentProgress, totalProgress)
// ----- finding phases --------------------------------------------
- def phaseNamed(name: String): Phase = {
- var p: Phase = firstPhase
- while (p.next != p && p.name != name) p = p.next
- if (p.name != name) NoPhase else p
- }
-
- val parserPhase = phaseNamed("parser")
- val namerPhase = phaseNamed("namer")
- // packageobjects
- val typerPhase = phaseNamed("typer")
- val inlineclassesPhase = phaseNamed("inlineclasses")
- // superaccessors
- val picklerPhase = phaseNamed("pickler")
- val refchecksPhase = phaseNamed("refchecks")
- val uncurryPhase = phaseNamed("uncurry")
- // tailcalls, specialize
- val explicitouterPhase = phaseNamed("explicitouter")
- val erasurePhase = phaseNamed("erasure")
- // lazyvals, lambdalift, constructors
- val flattenPhase = phaseNamed("flatten")
- val mixinPhase = phaseNamed("mixin")
- val cleanupPhase = phaseNamed("cleanup")
- val icodePhase = phaseNamed("icode")
- // inliner, closelim, dce
- val jvmPhase = phaseNamed("jvm")
+ def phaseNamed(name: String): Phase =
+ findOrElse(firstPhase.iterator)(_.name == name)(NoPhase)
+
+ /** All phases as of 3/2012 here for handiness; the ones in
+ * active use uncommented.
+ */
+ val parserPhase = phaseNamed("parser")
+ val namerPhase = phaseNamed("namer")
+ // val packageobjectsPhase = phaseNamed("packageobjects")
+ val typerPhase = phaseNamed("typer")
+ val inlineclassesPhase = phaseNamed("inlineclasses")
+ // val superaccessorsPhase = phaseNamed("superaccessors")
+ val picklerPhase = phaseNamed("pickler")
+ val refchecksPhase = phaseNamed("refchecks")
+ // val selectiveanfPhase = phaseNamed("selectiveanf")
+ // val selectivecpsPhase = phaseNamed("selectivecps")
+ val uncurryPhase = phaseNamed("uncurry")
+ // val tailcallsPhase = phaseNamed("tailcalls")
+ val specializePhase = phaseNamed("specialize")
+ val explicitouterPhase = phaseNamed("explicitouter")
+ val erasurePhase = phaseNamed("erasure")
+ // val lazyvalsPhase = phaseNamed("lazyvals")
+ val lambdaliftPhase = phaseNamed("lambdalift")
+ // val constructorsPhase = phaseNamed("constructors")
+ val flattenPhase = phaseNamed("flatten")
+ val mixinPhase = phaseNamed("mixin")
+ val cleanupPhase = phaseNamed("cleanup")
+ val icodePhase = phaseNamed("icode")
+ // val inlinerPhase = phaseNamed("inliner")
+ // val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers")
+ // val closelimPhase = phaseNamed("closelim")
+ // val dcePhase = phaseNamed("dce")
+ val jvmPhase = phaseNamed("jvm")
def runIsAt(ph: Phase) = globalPhase.id == ph.id
def runIsPast(ph: Phase) = globalPhase.id > ph.id
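
The rewritten phaseNamed leans on a findOrElse helper; a minimal stand-in for it (the real one lives elsewhere in the compiler's utility code) makes the fallback behaviour concrete:

    // Minimal stand-in: first element satisfying p, else the fallback.
    def findOrElse[T](xs: Iterator[T])(p: T => Boolean)(orElse: => T): T =
      xs find p getOrElse orElse

    def names = Iterator("parser", "namer", "typer")   // fresh per call
    findOrElse(names)(_ == "typer")("NoPhase")         // "typer"
    findOrElse(names)(_ == "jvm")("NoPhase")           // "NoPhase"
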
@@ -1090,7 +1176,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
- else if (!sym.owner.isPackageClass) compiles(sym.toplevelClass)
+ else if (!sym.owner.isPackageClass) compiles(sym.enclosingTopLevelClass)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -1128,7 +1214,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x))
def snapshot() = {
inform("\n[[symbol layout at end of " + phase + "]]")
- atPhase(phase.next) {
+ afterPhase(phase) {
trackers foreach { t =>
t.snapshot()
inform(t.show("Heading from " + phase.prev.name + " to " + phase.name))
@@ -1181,12 +1267,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
*/
def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
try compileUnitsInternal(units, fromPhase)
- catch { case ex =>
+ catch { case ex =>
globalError(supplementErrorMessage("uncaught exception during compilation: " + ex.getClass.getName))
throw ex
}
}
-
+
private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
units foreach addUnit
if (opt.profileAll) {
@@ -1199,7 +1285,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
checkDeprecatedSettings(unitbuf.head)
globalPhase = fromPhase
- while (globalPhase != terminalPhase && !reporter.hasErrors) {
+ while (globalPhase.hasNext && !reporter.hasErrors) {
val startTime = currentTime
phase = globalPhase
@@ -1320,19 +1406,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Compile abstract file until `globalPhase`, but at least to phase "namer".
*/
def compileLate(unit: CompilationUnit) {
- def stop(ph: Phase) = ph == null || ph.id >= (globalPhase.id max typerPhase.id)
- def loop(ph: Phase) {
- if (stop(ph)) refreshProgress
- else {
- atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit)
- loop(ph.next match {
- case `ph` => null // ph == ph.next implies terminal, and null ends processing
- case x => x
- })
- }
- }
+ val maxId = math.max(globalPhase.id, typerPhase.id)
addUnit(unit)
- loop(firstPhase)
+
+ firstPhase.iterator takeWhile (_.id < maxId) foreach (ph =>
+ atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit)
+ )
+ refreshProgress
}
/**
@@ -1402,7 +1482,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
def printAllUnits() {
print("[[syntax trees at end of " + phase + "]]")
- atPhase(phase.next) { currentRun.units foreach (treePrinter.print(_)) }
+ afterPhase(phase) { currentRun.units foreach (treePrinter.print(_)) }
}
private def findMemberFromRoot(fullName: Name): Symbol = {
diff --git a/src/compiler/scala/tools/nsc/MacroContext.scala b/src/compiler/scala/tools/nsc/MacroContext.scala
index 72662291f8..9ea1f87125 100644
--- a/src/compiler/scala/tools/nsc/MacroContext.scala
+++ b/src/compiler/scala/tools/nsc/MacroContext.scala
@@ -3,8 +3,8 @@ package scala.tools.nsc
import symtab.Flags._
trait MacroContext extends reflect.macro.Context { self: Global =>
-
+
def captureVariable(vble: Symbol): Unit = vble setFlag CAPTURED
-
+
def referenceCapturedVariable(id: Ident): Tree = ReferenceToBoxed(id)
}
diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala
index cd9fef117f..a3e451f32f 100644
--- a/src/compiler/scala/tools/nsc/SubComponent.scala
+++ b/src/compiler/scala/tools/nsc/SubComponent.scala
@@ -47,6 +47,9 @@ abstract class SubComponent {
private var ownPhaseCache: WeakReference[Phase] = new WeakReference(null)
private var ownPhaseRunId = global.NoRunId
+ @inline final def beforeOwnPhase[T](op: => T) = global.beforePhase(ownPhase)(op)
+ @inline final def afterOwnPhase[T](op: => T) = global.afterPhase(ownPhase)(op)
+
/** The phase corresponding to this subcomponent in the current compiler run */
def ownPhase: Phase = {
ownPhaseCache.get match {
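
A hedged illustration of the new SubComponent wrappers at a hypothetical call site inside a transform component:

    // Hypothetical: compare a symbol's info before and after this
    // component's own phase without spelling out ownPhase twice.
    def infoChangedByThisPhase(sym: global.Symbol): Boolean =
      beforeOwnPhase(sym.info) != afterOwnPhase(sym.info)
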
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index ea51fc0141..9466d1c1f2 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -71,34 +71,39 @@ abstract class NodePrinters {
def nodeinfo(tree: Tree): String =
if (infolevel == InfoLevel.Quiet) ""
else {
- val buf = new StringBuilder(" // sym=" + tree.symbol)
- if (tree.hasSymbol) {
- if (tree.symbol.isPrimaryConstructor)
- buf.append(", isPrimaryConstructor")
- else if (tree.symbol.isConstructor)
- buf.append(", isConstructor")
- if (tree.symbol != NoSymbol)
- buf.append(", sym.owner=" + tree.symbol.owner)
- buf.append(", sym.tpe=" + tree.symbol.tpe)
- }
- buf.append(", tpe=" + tree.tpe)
- if (tree.tpe != null) {
- var sym = tree.tpe.termSymbol
- if (sym == NoSymbol) sym = tree.tpe.typeSymbol
- buf.append(", tpe.sym=" + sym)
- if (sym != NoSymbol) {
- buf.append(", tpe.sym.owner=" + sym.owner)
- if ((infolevel > InfoLevel.Normal) &&
- !(sym.owner eq definitions.ScalaPackageClass) &&
- !sym.isModuleClass && !sym.isPackageClass &&
- !sym.isJavaDefined) {
- val members = for (m <- tree.tpe.decls)
- yield m.toString() + ": " + m.tpe + ", "
- buf.append(", tpe.decls=" + members)
+ try {
+ val buf = new StringBuilder(" // sym=" + tree.symbol)
+ if (tree.hasSymbol) {
+ if (tree.symbol.isPrimaryConstructor)
+ buf.append(", isPrimaryConstructor")
+ else if (tree.symbol.isConstructor)
+ buf.append(", isConstructor")
+ if (tree.symbol != NoSymbol)
+ buf.append(", sym.owner=" + tree.symbol.owner)
+ buf.append(", sym.tpe=" + tree.symbol.tpe)
+ }
+ buf.append(", tpe=" + tree.tpe)
+ if (tree.tpe != null) {
+ var sym = tree.tpe.termSymbol
+ if (sym == NoSymbol) sym = tree.tpe.typeSymbol
+ buf.append(", tpe.sym=" + sym)
+ if (sym != NoSymbol) {
+ buf.append(", tpe.sym.owner=" + sym.owner)
+ if ((infolevel > InfoLevel.Normal) &&
+ !(sym.owner eq definitions.ScalaPackageClass) &&
+ !sym.isModuleClass && !sym.isPackageClass &&
+ !sym.isJavaDefined) {
+ val members = for (m <- tree.tpe.decls)
+ yield m.toString() + ": " + m.tpe + ", "
+ buf.append(", tpe.decls=" + members)
+ }
}
}
+ buf.toString
+ } catch {
+ case ex: Throwable =>
+ return " // sym= <error> " + ex.getMessage
}
- buf.toString
}
def nodeinfo2(tree: Tree): String =
(if (comma) "," else "") + nodeinfo(tree)
diff --git a/src/compiler/scala/tools/nsc/ast/Reifiers.scala b/src/compiler/scala/tools/nsc/ast/Reifiers.scala
new file mode 100644
index 0000000000..7ece8bbd31
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/Reifiers.scala
@@ -0,0 +1,761 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Gilles Dubochet
+ */
+
+package scala.tools.nsc
+package ast
+
+import symtab._
+import Flags._
+import scala.reflect.api.Modifier._
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.ListBuffer
+import scala.tools.nsc.util.FreshNameCreator
+import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple }
+
+/** Given a tree or type, generate a tree that when executed at runtime produces the original tree or type.
+ * For more information, see the comments on `reify' in scala.reflect.macro.Context.
+ *
+ * @author Martin Odersky
+ * @version 2.10
+ */
+trait Reifiers { self: Global =>
+
+ def reify(tree: Tree): Tree = {
+ class Reifier {
+ import definitions._
+ import Reifier._
+
+ final val scalaPrefix = "scala."
+ final val localPrefix = "$local"
+ final val memoizerName = "$memo"
+
+ val reifyDebug = settings.Yreifydebug.value
+
+ private val reifiableSyms = mutable.ArrayBuffer[Symbol]() // the symbols that are reified with the tree
+ private val symIndex = mutable.HashMap[Symbol, Int]() // the index of a reifiable symbol in `reifiableSyms`
+ private var boundSyms = Set[Symbol]() // set of all symbols that are bound in tree to be reified
+
+ private def definedInLiftedCode(tpe: Type) =
+ tpe exists (tp => boundSyms contains tp.typeSymbol)
+
+ private def definedInLiftedCode(sym: Symbol) =
+ boundSyms contains sym
+
+ /**
+ * Generate a tree of the form
+ *
+ * { val $mr = scala.reflect.runtime.Mirror
+ * $local1 = new TypeSymbol(owner1, NoPosition, name1)
+ * ...
+ * $localN = new TermSymbol(ownerN, NoPosition, nameN)
+ * $local1.setInfo(tpe1)
+ * ...
+ * $localN.setInfo(tpeN)
+ * $localN.setAnnotations(annotsN)
+ * rtree
+ * }
+ *
+ * where
+ *
+ * - `$localI` are free type symbols in the environment, as well as local symbols
+ * of refinement types.
+ * - `tpeI` are the infos of `symI`
+ * - `rtree` is code that generates `data` at runtime, maintaining all attributes.
+ * - `data` is typically a tree or a type.
+ */
+ def reifyTopLevel(data: Any): Tree = {
+ val rtree = reify(data)
+ Block(mirrorAlias :: reifySymbolTableSetup, rtree)
+ }
+
+ private def isLocatable(sym: Symbol) =
+ sym.isPackageClass || sym.owner.isClass || sym.isTypeParameter && sym.paramPos >= 0
+
+ private def registerReifiableSymbol(sym: Symbol): Unit =
+ if (!(symIndex contains sym)) {
+ sym.owner.ownersIterator find (x => !isLocatable(x)) foreach registerReifiableSymbol
+ symIndex(sym) = reifiableSyms.length
+ reifiableSyms += sym
+ }
+
+ // helper methods
+
+ private def localName(sym: Symbol): TermName =
+ newTermName(localPrefix + symIndex(sym))
+
+ private def call(fname: String, args: Tree*): Tree =
+ Apply(termPath(fname), args.toList)
+
+ private def mirrorSelect(name: String): Tree =
+ termPath(nme.MIRROR_PREFIX + name)
+
+ private def mirrorCall(name: TermName, args: Tree*): Tree =
+ call("" + (nme.MIRROR_PREFIX append name), args: _*)
+
+ private def mirrorCall(name: String, args: Tree*): Tree =
+ call(nme.MIRROR_PREFIX + name, args: _*)
+
+ private def mirrorFactoryCall(value: Product, args: Tree*): Tree =
+ mirrorFactoryCall(value.productPrefix, args: _*)
+
+ private def mirrorFactoryCall(prefix: String, args: Tree*): Tree =
+ mirrorCall(prefix, args: _*)
+
+ private def scalaFactoryCall(name: String, args: Tree*): Tree =
+ call(scalaPrefix + name + ".apply", args: _*)
+
+ private def mkList(args: List[Tree]): Tree =
+ scalaFactoryCall("collection.immutable.List", args: _*)
+
+ private def reifyModifiers(m: Modifiers) =
+ mirrorCall("modifiersFromInternalFlags", reify(m.flags), reify(m.privateWithin), reify(m.annotations))
+
+ private def reifyAggregate(name: String, args: Any*) =
+ scalaFactoryCall(name, (args map reify).toList: _*)
+
+ /**
+ * Reify a list
+ */
+ private def reifyList(xs: List[Any]): Tree =
+ mkList(xs map reify)
+
+ /**
+ * Reify an array
+ */
+ private def reifyArray(xs: Array[_]): Tree =
+ // @xeno.by: doesn't work for Array(LiteralAnnotArg(...))
+ // because we cannot generate manifests for path-dependent types
+ scalaFactoryCall(nme.Array, xs map reify: _*)
+
+ /** Reify a name */
+ private def reifyName(name: Name) =
+ mirrorCall(if (name.isTypeName) "newTypeName" else "newTermName", Literal(Constant(name.toString)))
+
+ private def isFree(sym: Symbol) =
+ !(symIndex contains sym)
+
+ /**
+ * Reify a reference to a symbol
+ */
+ private def reifySymRef(sym: Symbol): Tree = {
+ symIndex get sym match {
+ case Some(idx) =>
+ Ident(localName(sym))
+ case None =>
+ if (sym == NoSymbol)
+ mirrorSelect("NoSymbol")
+ else if (sym == RootPackage)
+ mirrorSelect("definitions.RootPackage")
+ else if (sym == RootClass)
+ mirrorSelect("definitions.RootClass")
+ else if (sym == EmptyPackage)
+ mirrorSelect("definitions.EmptyPackage")
+ else if (sym.isModuleClass)
+ Select(reifySymRef(sym.sourceModule), "moduleClass")
+ else if (sym.isStatic && sym.isClass)
+ mirrorCall("staticClass", reify(sym.fullName))
+ else if (sym.isStatic && sym.isModule)
+ mirrorCall("staticModule", reify(sym.fullName))
+ else if (isLocatable(sym))
+ if (sym.isTypeParameter)
+ mirrorCall("selectParam", reify(sym.owner), reify(sym.paramPos))
+ else {
+ if (reifyDebug) println("locatable: " + sym + " " + sym.isPackageClass + " " + sym.owner + " " + sym.isTypeParameter)
+ val rowner = reify(sym.owner)
+ val rname = reify(sym.name.toString)
+ if (sym.isType)
+ mirrorCall("selectType", rowner, rname)
+ else if (sym.isMethod && sym.owner.isClass && sym.owner.info.decl(sym.name).isOverloaded) {
+ val index = sym.owner.info.decl(sym.name).alternatives indexOf sym
+ assert(index >= 0, sym)
+ mirrorCall("selectOverloadedMethod", rowner, rname, reify(index))
+ } else
+ mirrorCall("selectTerm", rowner, rname)
+ }
+ else {
+ if (sym.isTerm) {
+ if (reifyDebug) println("Free: " + sym)
+ val symtpe = lambdaLift.boxIfCaptured(sym, sym.tpe, erasedTypes = false)
+ def markIfCaptured(arg: Ident): Tree =
+ if (sym.isCapturedVariable) referenceCapturedVariable(arg) else arg
+ mirrorCall("newFreeVar", reify(sym.name.toString), reify(symtpe), markIfCaptured(Ident(sym)))
+ } else {
+ if (reifyDebug) println("Late local: " + sym)
+ registerReifiableSymbol(sym)
+ reifySymRef(sym)
+ }
+ }
+ }
+ }
+
+ /**
+ * Reify the creation of a symbol
+ */
+ private def reifySymbolDef(sym: Symbol): Tree = {
+ if (reifyDebug) println("reify sym def " + sym)
+
+ ValDef(NoMods, localName(sym), TypeTree(),
+ Apply(
+ Select(reify(sym.owner), "newNestedSymbol"),
+ List(reify(sym.name), reify(sym.pos), Literal(Constant(sym.flags)))
+ )
+ )
+ }
+
+ /**
+ * Generate code to add type and annotation info to a reified symbol
+ */
+ private def fillInSymbol(sym: Symbol): Tree = {
+ val rset = Apply(Select(reifySymRef(sym), nme.setTypeSignature), List(reifyType(sym.info)))
+ if (sym.annotations.isEmpty) rset
+ else Apply(Select(rset, nme.setAnnotations), List(reify(sym.annotations)))
+ }
+
+ /** Reify a scope */
+ private def reifyScope(scope: Scope): Tree = {
+ scope foreach registerReifiableSymbol
+ mirrorCall(nme.newScopeWith, scope.toList map reifySymRef: _*)
+ }
+
+ /** Reify a list of symbols that need to be created */
+ private def reifySymbols(syms: List[Symbol]): Tree = {
+ syms foreach registerReifiableSymbol
+ mkList(syms map reifySymRef)
+ }
+
+ /** Reify a type that defines some symbols */
+ private def reifyTypeBinder(value: Product, bound: List[Symbol], underlying: Type): Tree =
+ mirrorFactoryCall(value, reifySymbols(bound), reify(underlying))
+
+ /** Reify a type */
+ private def reifyType(tpe0: Type): Tree = {
+ val tpe = tpe0.normalize
+
+ if (tpe.isErroneous)
+ CannotReifyErroneousType(tpe)
+ if (definedInLiftedCode(tpe))
+ CannotReifyTypeInvolvingBoundType(tpe)
+
+ val tsym = tpe.typeSymbol
+ if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic)
+ Select(reifySymRef(tpe.typeSymbol), nme.asTypeConstructor)
+ else tpe match {
+ case t @ NoType =>
+ reifyMirrorObject(t)
+ case t @ NoPrefix =>
+ reifyMirrorObject(t)
+ case tpe @ ThisType(clazz) if clazz.isModuleClass && clazz.isStatic =>
+ mirrorCall(nme.thisModuleType, reify(clazz.fullName))
+ case t @ RefinedType(parents, decls) =>
+ registerReifiableSymbol(tpe.typeSymbol)
+ mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol))
+ case t @ ClassInfoType(parents, decls, clazz) =>
+ registerReifiableSymbol(clazz)
+ mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol))
+ case t @ ExistentialType(tparams, underlying) =>
+ reifyTypeBinder(t, tparams, underlying)
+ case t @ PolyType(tparams, underlying) =>
+ reifyTypeBinder(t, tparams, underlying)
+ case t @ MethodType(params, restpe) =>
+ reifyTypeBinder(t, params, restpe)
+ case t @ AnnotatedType(anns, underlying, selfsym) =>
+ val saved1 = reifySymbols
+ val saved2 = reifyTypes
+
+ try {
+ // one more quirk of reifying annotations
+ //
+ // when reifying AnnotatedTypes we need to reify all the types and symbols of inner ASTs
+ // that's because a lot of logic expects post-typer trees to have non-null tpes
+ //
+ // Q: reified trees are pre-typer, so there shouldn't be a problem.
+ // reflective typechecker will fill in missing symbols and types, right?
+ // A: actually, no. annotation ASTs live inside AnnotatedTypes,
+ // and the insides of types are the one place where the typechecker doesn't look.
+ reifySymbols = true
+ reifyTypes = true
+ if (reifyDebug) println("reify AnnotatedType: " + tpe)
+ reifyProductUnsafe(tpe)
+ } finally {
+ reifySymbols = saved1
+ reifyTypes = saved2
+ }
+ case _ =>
+ reifyProductUnsafe(tpe)
+ }
+ }
+
+ var reifySymbols = false
+ var reifyTypes = false
+
+ /** Preprocess a tree before reification */
+ private def trimTree(tree: Tree): Tree = {
+ def trimSyntheticCaseClassMembers(deff: Tree, stats: List[Tree]) = {
+ var stats1 = stats filterNot (stat => stat.isDef && {
+ if (stat.symbol.isCaseAccessorMethod && reifyDebug) println("discarding case accessor method: " + stat)
+ stat.symbol.isCaseAccessorMethod
+ })
+ stats1 = stats1 filterNot (memberDef => memberDef.isDef && {
+ val isSynthetic = memberDef.symbol.isSynthetic
+ // @xeno.by: this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
+ // that's why I replace the check with an assumption that all synthetic members are, in fact, generated of case classes
+// val isCaseMember = deff.symbol.isCaseClass || deff.symbol.companionClass.isCaseClass
+ val isCaseMember = true
+ if (isSynthetic && isCaseMember && reifyDebug) println("discarding case class synthetic def: " + memberDef)
+ isSynthetic && isCaseMember
+ })
+ stats1 = stats1 map {
+ case valdef @ ValDef(mods, name, tpt, rhs) if valdef.symbol.isCaseAccessor =>
+ if (reifyDebug) println("resetting visibility of case accessor field: " + valdef)
+ val Modifiers(flags, privateWithin, annotations) = mods
+ val flags1 = flags & ~Flags.LOCAL & ~Flags.PRIVATE
+ val mods1 = Modifiers(flags1, privateWithin, annotations)
+ ValDef(mods1, name, tpt, rhs).copyAttrs(valdef)
+ case stat =>
+ stat
+ }
+ stats1
+ }
+
+ def trimSyntheticCaseClassCompanions(stats: List[Tree]) =
+ stats diff (stats collect { case moddef: ModuleDef => moddef } filter (moddef => {
+ val isSynthetic = moddef.symbol.isSynthetic
+ // @xeno.by: this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
+ // that's why I replace the check with an assumption that all synthetic modules are, in fact, companions of case classes
+// val isCaseCompanion = moddef.symbol.companionClass.isCaseClass
+ val isCaseCompanion = true
+ // @xeno.by: we also have to do this ugly hack for the very same reason described above
+ // normally this sort of stuff is performed in reifyTree, which binds related symbols, however, local companions will be out of its reach
+ if (reifyDebug) println("boundSym: "+ moddef.symbol)
+ boundSyms += moddef.symbol
+ if (isSynthetic && isCaseCompanion && reifyDebug) println("discarding synthetic case class companion: " + moddef)
+ isSynthetic && isCaseCompanion
+ }))
+
+ tree match {
+ case tree if tree.isErroneous =>
+ tree
+ case ta @ TypeApply(hk, ts) =>
+ def isErased(tt: TypeTree) = tt.tpe != null && definedInLiftedCode(tt.tpe) && tt.original == null
+ val discard = ts collect { case tt: TypeTree => tt } exists isErased
+ if (reifyDebug && discard) println("discarding TypeApply: " + tree)
+ if (discard) hk else ta
+ case classDef @ ClassDef(mods, name, params, impl) =>
+ val Template(parents, self, body) = impl
+ val body1 = trimSyntheticCaseClassMembers(classDef, body)
+ var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ ClassDef(mods, name, params, impl1).copyAttrs(classDef)
+ case moduledef @ ModuleDef(mods, name, impl) =>
+ val Template(parents, self, body) = impl
+ val body1 = trimSyntheticCaseClassMembers(moduledef, body)
+ var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ ModuleDef(mods, name, impl1).copyAttrs(moduledef)
+ case template @ Template(parents, self, body) =>
+ val body1 = trimSyntheticCaseClassCompanions(body)
+ Template(parents, self, body1).copyAttrs(template)
+ case block @ Block(stats, expr) =>
+ val stats1 = trimSyntheticCaseClassCompanions(stats)
+ Block(stats1, expr).copyAttrs(block)
+ case valdef @ ValDef(mods, name, tpt, rhs) if valdef.symbol.isLazy =>
+ if (reifyDebug) println("dropping $lzy in lazy val's name: " + tree)
+ val name1 = if (name endsWith nme.LAZY_LOCAL) name dropRight nme.LAZY_LOCAL.length else name
+ ValDef(mods, name1, tpt, rhs).copyAttrs(valdef)
+ case unapply @ UnApply(fun, args) =>
+ def extractExtractor(tree: Tree): Tree = {
+ val Apply(fun, args) = tree
+ args match {
+ case List(Ident(special)) if special == nme.SELECTOR_DUMMY =>
+ val Select(extractor, flavor) = fun
+ assert(flavor == nme.unapply || flavor == nme.unapplySeq)
+ extractor
+ case _ =>
+ extractExtractor(fun)
+ }
+ }
+
+ if (reifyDebug) println("unapplying unapply: " + tree)
+ val fun1 = extractExtractor(fun)
+ Apply(fun1, args).copyAttrs(unapply)
+ case _ =>
+ tree
+ }
+ }
+
+ /** Reify a tree */
+ private def reifyTree(tree0: Tree): Tree = {
+ val tree = trimTree(tree0)
+
+ var rtree = tree match {
+ case tree if tree.isErroneous =>
+ CannotReifyErroneousTree(tree)
+ case self.EmptyTree =>
+ reifyMirrorObject(EmptyTree)
+ case self.emptyValDef =>
+ mirrorSelect(nme.emptyValDef)
+ case This(_) if tree.symbol != NoSymbol && !(boundSyms contains tree.symbol) =>
+ reifyFree(tree)
+ case Ident(_) if tree.symbol != NoSymbol && !(boundSyms contains tree.symbol) =>
+ if (tree.symbol.isVariable && tree.symbol.owner.isTerm) {
+ if (reifyDebug) println("captured variable: " + tree.symbol)
+ captureVariable(tree.symbol) // Note order dependency: captureVariable needs to come before reifyTree here.
+ mirrorCall("Select", reifyFree(tree), reifyName(nme.elem))
+ } else reifyFree(tree)
+ case tt: TypeTree if (tt.tpe != null) =>
+ reifyTypeTree(tt)
+ case Literal(constant @ Constant(tpe: Type)) if boundSyms exists (tpe contains _) =>
+ CannotReifyClassOfBoundType(tree, tpe)
+ case Literal(constant @ Constant(sym: Symbol)) if boundSyms contains sym =>
+ CannotReifyClassOfBoundEnum(tree, constant.tpe)
+ case tree if tree.isDef =>
+ if (reifyDebug) println("boundSym: %s of type %s".format(tree.symbol, (tree.productIterator.toList collect { case tt: TypeTree => tt } headOption).getOrElse(TypeTree(tree.tpe))))
+ boundSyms += tree.symbol
+
+ bindRelatedSymbol(tree.symbol.sourceModule, "sourceModule")
+ bindRelatedSymbol(tree.symbol.moduleClass, "moduleClass")
+ bindRelatedSymbol(tree.symbol.companionClass, "companionClass")
+ bindRelatedSymbol(tree.symbol.companionModule, "companionModule")
+ Some(tree.symbol) collect { case termSymbol: TermSymbol => bindRelatedSymbol(termSymbol.referenced, "referenced") }
+ def bindRelatedSymbol(related: Symbol, name: String): Unit =
+ if (related != null && related != NoSymbol) {
+ if (reifyDebug) println("boundSym (" + name + "): " + related)
+ boundSyms += related
+ }
+
+ val prefix = tree.productPrefix
+ val elements = (tree.productIterator map {
+ // annotations exist in two flavors:
+ // 1) pre-typer ones that populate: a) Modifiers, b) Annotated nodes (irrelevant in this context)
+ // 2) post-typer ones that dwell inside: a) sym.annotations, b) AnnotatedTypes (irrelevant in this context)
+ //
+ // here we process Modifiers that are involved in deftrees
+ // AnnotatedTypes get reified elsewhere (currently, in ``reifyTypeTree'')
+ case Modifiers(flags, privateWithin, annotations) =>
+ assert(annotations.isEmpty) // should've been eliminated by the typer
+ val postTyper = tree.symbol.annotations filter (_.original != EmptyTree)
+ if (reifyDebug && !postTyper.isEmpty) println("reify symbol annotations for %s: %s".format(tree.symbol, tree.symbol.annotations))
+ val preTyper = postTyper map toPreTyperAnnotation
+ Modifiers(flags, privateWithin, preTyper)
+ case x =>
+ x
+ }).toList
+ reifyProduct(prefix, elements)
+ case _ =>
+ reifyProduct(tree)
+ }
+
+ // usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
+ // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why.
+ if (reifySymbols && tree.hasSymbol) {
+ if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree))
+ rtree = Apply(Select(rtree, nme.setSymbol), List(reifySymRef(tree.symbol)))
+ }
+ if (reifyTypes && tree.tpe != null) {
+ if (reifyDebug) println("reifying type %s for tree %s".format(tree.tpe, tree))
+ rtree = Apply(Select(rtree, nme.setType), List(reifyType(tree.tpe)))
+ }
+
+ rtree
+ }
+
+ /** Reify pre-typer representation of a type.
+ *
+ * NB: This is the trickiest part of reification!
+ *
+ * In most cases, we're perfectly fine to reify a Type itself (see ``reifyType'').
+ * However if the type involves a symbol declared inside the quasiquote (i.e. registered in ``boundSyms''),
+ * then we cannot reify it, or otherwise subsequent reflective compilation will fail.
+ *
+ * Why will it fail? Because reified deftrees (e.g. ClassDef(...)) will generate fresh symbols during that compilation,
+ * so naively reified symbols will become out of sync, which brings really funny compilation errors and/or crashes, e.g.:
+ * https://issues.scala-lang.org/browse/SI-5230
+ *
+ * To deal with this unpleasant fact, we need to fall back from types to equivalent trees (after all, parser trees don't contain any types, just trees, so it should be possible).
+ * Luckily, these original trees get preserved for us in the ``original'' field when Trees get transformed into TypeTrees.
+ * And if an original of a type tree is empty, we can safely assume that this type is non-essential (e.g. was inferred/generated by the compiler).
+ * In that case the type can be omitted (e.g. reified as an empty TypeTree), since it will be inferred again later on.
+ *
+ * An important property of the original is that it isn't just a pre-typer tree.
+ * It's actually kind of a post-typer tree with symbols assigned to its Idents (e.g. Ident("List") will contain a symbol that points to immutable.this.List).
+ * This is very important, since subsequent reflective compilation won't have to resolve these symbols.
+ * In the general case, such resolution cannot be performed, since reification doesn't preserve lexical context,
+ * which means that reflective compilation won't be aware of, say, imports that were provided when the reifee has been compiled.
+ *
+ * This workaround worked surprisingly well and allowed me to fix several important reification bugs, until the abstraction leaked.
+ * Suddenly I found out that in certain contexts original trees do not contain symbols, but are just parser trees.
+ * So far I know of only one such situation: typedAnnotations does not typecheck the annotation in-place, but rather creates new trees and typechecks them, so the original remains symless.
+ * This is laboriously worked around in the code below. I hope this will be the only workaround in this department.
+ */
+ private def reifyTypeTree(tt: TypeTree): Tree = {
+ if (definedInLiftedCode(tt.tpe)) {
+ if (reifyDebug) println("reifyTypeTree, defined in lifted code: " + tt.tpe)
+ if (tt.original != null) {
+ val annotations = tt.tpe filter { _.isInstanceOf[AnnotatedType] } collect { case atp: AnnotatedType => atp.annotations } flatten
+ val annmap = annotations map { ann => (ann.original, ann) } toMap
+
+ // annotations exist in two flavors:
+ // 1) pre-typer ones that populate: a) Modifiers (irrelevant in this context), b) Annotated nodes
+ // 2) post-typer ones that dwell inside: a) sym.annotations (irrelevant in this context), b) AnnotatedTypes
+ //
+ // here we process AnnotatedTypes, since only they can be involved in TypeTrees
+ // Modifiers get reified elsewhere (currently, in the "isDef" case of ``reifyTree'')
+ //
+ // the problem with annotations is that their originals don't preserve any symbols at all
+ // read the comment to this method to find out why it's bad
+ // that's why we transplant typechecked, i.e. symful, annotations onto original trees
+ class AnnotationFixup extends self.Transformer {
+ override def transform(tree: Tree) = tree match {
+ case Annotated(ann0, args) =>
+ assert(annmap contains ann0)
+ val ann1 = annmap(ann0)
+ val ann = toPreTyperAnnotation(ann1)
+ Annotated(ann, transform(args))
+ case _ =>
+ tree
+ }
+ }
+
+ if (reifyDebug) println("verdict: essential, reify as original")
+ val patchedOriginal = new AnnotationFixup().transform(tt.original)
+ reifyTree(patchedOriginal)
+ } else {
+ // type is deemed to be non-essential
+ // erase it and hope that subsequent reflective compilation will be able to recreate it again
+ if (reifyDebug) println("verdict: non-essential, discard")
+ mirrorCall("TypeTree")
+ }
+ } else {
+ var rtt = mirrorCall(nme.TypeTree, reifyType(tt.tpe))
+ // @xeno.by: temporarily disabling reification of originals
+ // subsequent reflective compilation will try to typecheck them
+ // and this means that the reifier has to do additional efforts to ensure that this will succeed
+ // additional efforts + no clear benefit = will be implemented later
+// if (tt.original != null) {
+// val setOriginal = Select(rtt, newTermName("setOriginal"))
+// val reifiedOriginal = reify(tt.original)
+// rtt = Apply(setOriginal, List(reifiedOriginal))
+// }
+ rtt
+ }
+ }
+
+ /** Reify post-typer representation of an annotation */
+ private def reifyAnnotation(ann: AnnotationInfo): Tree =
+ // @xeno.by: if you reify originals, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
+ mirrorFactoryCall("AnnotationInfo", reifyType(ann.atp), reifyList(ann.args), reify(ann.assocs))
+
+ /** Reify pre-typer representation of an annotation.
+ * The trick here is to retain the symbols that have been populated during typechecking of the annotation.
+ * If we do not do that, subsequent reflective compilation will fail.
+ */
+ private def toPreTyperAnnotation(ann: AnnotationInfo): Tree = {
+ if (definedInLiftedCode(ann.atp)) {
+ // todo. deconstruct reifiable tree from ann.original and ann.args+ann.assocs
+ //
+ // keep in mind that we can't simply use ann.original, because its args are symless
+ // which means that any imported symbol (e.g. List) will crash subsequent reflective compilation
+ // hint: if I had enough time, I'd try to extract reifiable annotation type from ann.original
+ // and to apply its constructor to ann.args (that are symful, i.e. suitable for reification)
+ //
+ // also, if we pursue the route of reifying annotations defined in lifted code
+ // we should think about how to provide types for all nodes of the return value
+ // this will be necessary for reifying AnnotatedTypes, since ASTs inside ATs must all have non-null tpes
+ // an alternative would be downgrading ATs to Annotated nodes, but this needs careful thinking
+ // for now I just leave this as an implementation restriction
+ CannotReifyAnnotationInvolvingBoundType(ann)
+ } else {
+ val args = if (ann.assocs.isEmpty) {
+ ann.args
+ } else {
+ def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = jann match {
+ case LiteralAnnotArg(const) =>
+ Literal(const)
+ case ArrayAnnotArg(arr) =>
+ Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation)
+ case NestedAnnotArg(ann) =>
+ toPreTyperAnnotation(ann)
+ }
+
+ ann.assocs map { case (nme, arg) => AssignOrNamedArg(Ident(nme), toScalaAnnotation(arg)) }
+ }
+
+ New(ann.atp, args: _*)
+ }
+ }
+
+ /**
+ * Reify a free reference. The result will be either a mirror reference
+ * to a global value, or else a mirror Literal.
+ */
+ private def reifyFree(tree: Tree): Tree = tree match {
+ case This(_) if tree.symbol.isClass && !tree.symbol.isModuleClass =>
+ val sym = tree.symbol
+ if (reifyDebug) println("This for %s, reified as freeVar".format(sym))
+ if (reifyDebug) println("Free: " + sym)
+ val freeVar = mirrorCall("newFreeVar", reify(sym.name.toString), reify(sym.tpe), This(sym))
+ mirrorCall(nme.Ident, freeVar)
+ case This(_) =>
+ if (reifyDebug) println("This for %s, reified as This".format(tree.symbol))
+ mirrorCall(nme.This, reifySymRef(tree.symbol))
+ case _ =>
+ mirrorCall(nme.Ident, reifySymRef(tree.symbol))
+ }
+
+ // todo: consider whether we should also reify positions
+ private def reifyPosition(pos: Position): Tree =
+ reifyMirrorObject(NoPosition)
+
+ // !!! we must eliminate these casts.
+ private def reifyProductUnsafe(x: Any): Tree =
+ if (x.isInstanceOf[Product]) reifyProduct(x.asInstanceOf[Product])
+ else throw new Exception("%s of type %s cannot be cast to Product".format(x, x.getClass))
+ private def reifyProduct(x: Product): Tree =
+ reifyProduct(x.productPrefix, x.productIterator.toList)
+ private def reifyProduct(prefix: String, elements: List[Any]): Tree = {
+ // @xeno.by: reflection would be more robust, but, hey, this is a hot path
+ if (prefix.startsWith("Tuple")) reifyAggregate(prefix, elements: _*)
+ else mirrorCall(prefix, (elements map reify): _*)
+ }
+
+ /**
+ * Reify a case object defined in Mirror
+ */
+ private def reifyMirrorObject(name: String): Tree = mirrorSelect(name)
+ private def reifyMirrorObject(x: Product): Tree = reifyMirrorObject(x.productPrefix)
+
+ private def isReifiableConstant(value: Any) = value match {
+ case null => true // seems pretty reifiable to me?
+ case _: String => true
+ case _ => isAnyVal(value)
+ }
+
+ /** Reify an arbitrary value */
+ private def reify(value: Any): Tree = value match {
+ case tree: Tree => reifyTree(tree)
+ case sym: Symbol => reifySymRef(sym)
+ case tpe: Type => reifyType(tpe)
+ case xs: List[_] => reifyList(xs)
+ case xs: Array[_] => reifyArray(xs)
+ case scope: Scope => reifyScope(scope)
+ case x: Name => reifyName(x)
+ case x: Position => reifyPosition(x)
+ case x: Modifiers => reifyModifiers(x)
+ case x: AnnotationInfo => reifyAnnotation(x)
+ case _ =>
+ if (isReifiableConstant(value)) Literal(Constant(value))
+ else reifyProductUnsafe(value)
+ }
+
+ /**
+ * An (unreified) path that refers to a definition with the given fully qualified name
+ * @param mkName Creator for the last portion of the name (either TermName or TypeName)
+ */
+ private def path(fullname: String, mkName: String => Name): Tree = {
+ val parts = fullname split "\\."
+ val prefixParts = parts.init
+ val lastName = mkName(parts.last)
+ if (prefixParts.isEmpty) Ident(lastName)
+ else {
+ val prefixTree = ((Ident(prefixParts.head): Tree) /: prefixParts.tail)(Select(_, _))
+ Select(prefixTree, lastName)
+ }
+ }
+
+ /** An (unreified) path that refers to a term definition with the given fully qualified name */
+ private def termPath(fullname: String): Tree = path(fullname, newTermName)
+
+ /** An (unreified) path that refers to a type definition with the given fully qualified name */
+ private def typePath(fullname: String): Tree = path(fullname, newTypeName)
+
+ private def mirrorAlias =
+ ValDef(NoMods, nme.MIRROR_SHORT, SingletonTypeTree(termPath(fullnme.MirrorPackage)), termPath(fullnme.MirrorPackage))
+
+ /**
+ * Generate code that generates a symbol table of all symbols registered in `reifiableSyms`
+ */
+ private def reifySymbolTableSetup: List[Tree] = {
+ val symDefs, fillIns = new mutable.ArrayBuffer[Tree]
+ var i = 0
+ while (i < reifiableSyms.length) {
+ // fillInSymbol might create new reifiableSyms, that's why this is done iteratively
+ symDefs += reifySymbolDef(reifiableSyms(i))
+ fillIns += fillInSymbol(reifiableSyms(i))
+ i += 1
+ }
+
+ symDefs.toList ++ fillIns.toList
+ }
+ } // end of Reifier
+
+ object Reifier {
+ def CannotReifyPreTyperTree(tree: Tree) = {
+ val msg = "pre-typer trees are not supported, consider typechecking the tree before passing it to the reifier"
+ throw new ReifierError(tree.pos, msg)
+ }
+
+ def CannotReifyErroneousTree(tree: Tree) = {
+ val msg = "erroneous trees are not supported, make sure that your tree typechecks successfully before passing it to the reifier"
+ throw new ReifierError(tree.pos, msg)
+ }
+
+ def CannotReifyErroneousType(tpe: Type) = {
+ val msg = "erroneous types are not supported, make sure that your tree typechecks successfully before passing it to the reifier"
+ throw new ReifierError(NoPosition, msg)
+ }
+
+ def CannotReifyClassOfBoundType(tree: Tree, tpe: Type) = {
+ val msg = "implementation restriction: cannot reify classOf[%s] which refers to a type declared inside the block being reified".format(tpe)
+ throw new ReifierError(tree.pos, msg)
+ }
+
+ def CannotReifyClassOfBoundEnum(tree: Tree, tpe: Type) = {
+ val msg = "implementation restriction: cannot reify classOf[%s] which refers to an enum declared inside the block being reified".format(tpe)
+ throw new ReifierError(tree.pos, msg)
+ }
+
+ def CannotReifyTypeInvolvingBoundType(tpe: Type) = {
+ val msg = "implementation restriction: cannot reify type %s which involves a symbol declared inside the block being reified".format(tpe)
+ throw new ReifierError(NoPosition, msg)
+ }
+
+ def CannotReifyAnnotationInvolvingBoundType(ann: AnnotationInfo) = {
+ val msg = "implementation restriction: cannot reify annotation @%s which involves a symbol declared inside the block being reified".format(ann)
+ throw new ReifierError(ann.original.pos, msg)
+ }
+ } // end of Reifier
+
+ // begin reify
+ import Reifier._
+ if (tree.tpe != null) {
+ val saved = printTypings
+ try {
+ val reifyDebug = settings.Yreifydebug.value
+ val debugTrace = util.trace when reifyDebug
+ debugTrace("transforming = ")(if (settings.Xshowtrees.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
+ debugTrace("transformed = ") {
+ val reifier = new Reifier()
+ val untyped = reifier.reifyTopLevel(tree)
+
+ val reifyCopypaste = settings.Yreifycopypaste.value
+ if (reifyCopypaste) {
+ if (reifyDebug) println("=======================")
+ println(reifiedNodeToString(untyped))
+ if (reifyDebug) println("=======================")
+ }
+
+ untyped
+ }
+ } finally {
+ printTypings = saved
+ }
+ } else {
+ CannotReifyPreTyperTree(tree)
+ }
+ }
+
+ /** A throwable signalling a reification error */
+ class ReifierError(var pos: Position, val msg: String) extends Throwable(msg) {
+ def this(msg: String) = this(NoPosition, msg)
+ }
+}
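
To make reifyTopLevel's output shape concrete: a schematic, illustrative-only sketch of what reifying a small tree is meant to produce, following the generator above (the $mr alias from mirrorAlias, symbol definitions from reifySymbolTableSetup); the exact tree text is not normative:

    // Illustrative only: reifying `List(1, 2)` should yield a tree that,
    // when compiled and run, rebuilds the original against the mirror.
    //
    //   {
    //     val $mr = scala.reflect.runtime.Mirror          // mirrorAlias
    //     // $local defs + setInfo calls would precede the result here
    //     // if the reifee referenced free or refinement symbols.
    //     $mr.Apply($mr.Select($mr.Ident(...), $mr.newTermName("apply")),
    //               List($mr.Literal($mr.Constant(1)),
    //                    $mr.Literal($mr.Constant(2))))
    //   }
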
diff --git a/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala b/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala
new file mode 100644
index 0000000000..fce59bb099
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala
@@ -0,0 +1,75 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package ast
+
+import compat.Platform.EOL
+import symtab._
+import Flags._
+
+trait ReifyPrinters { self: NodePrinters =>
+
+ val global: Global
+ import global._
+
+ object reifiedNodeToString extends Function1[Tree, String] {
+ def apply(tree: Tree): String = {
+ import scala.reflect.api.Modifier
+
+ // @PP: I fervently hope this is a test case or something, not anything being
+ // depended upon. Of more fragile code I cannot conceive.
+ // @eb: This stuff is only needed to debug-print out reifications in human-readable format
+ // Rolling a full-fledged, robust TreePrinter would be several times more code.
+ (for (line <- (tree.toString.split(EOL) drop 2 dropRight 1)) yield {
+ var s = line.trim
+ s = s.replace("$mr.", "")
+ s = s.replace(".apply", "")
+ s = s.replace("scala.collection.immutable.", "")
+ s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List")
+ s = "List\\[.*?\\]".r.replaceAllIn(s, "List")
+ s = s.replace("immutable.this.Nil", "List()")
+ s = s.replace("modifiersFromInternalFlags", "Modifiers")
+ s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
+ s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
+ val buf = new collection.mutable.ListBuffer[String]
+
+ val annotations = m.group(3)
+ if (buf.nonEmpty || annotations.nonEmpty)
+ buf.append("List(" + annotations + ")")
+
+ val privateWithin = "" + m.group(2)
+ if (buf.nonEmpty || privateWithin != "")
+ buf.append("newTypeName(\"" + privateWithin + "\")")
+
+ val flags = m.group(1).toLong
+ val s_flags = Flags.modifiersOfFlags(flags) map (_.sourceString) mkString ", "
+ if (buf.nonEmpty || s_flags != "")
+ buf.append("Set(" + s_flags + ")")
+
+ "Modifiers(" + buf.reverse.mkString(", ") + ")"
+ })
+ s = """setInternalFlags\((\d+)L\)""".r.replaceAllIn(s, m => {
+ val flags = m.group(1).toLong
+ val mods = Flags.modifiersOfFlags(flags) map (_.sourceString)
+ "setInternalFlags(flagsOfModifiers(List(" + mods.mkString(", ") + ")))"
+ })
+
+ s
+ }) mkString EOL
+ }
+ }
+
+
+ def printReifyCopypaste(tree: Tree) {
+ val reifyDebug = settings.Yreifydebug.value
+ if (reifyDebug) println("=======================")
+ printReifyCopypaste1(tree)
+ if (reifyDebug) println("=======================")
+ }
+
+ def printReifyCopypaste1(tree: Tree) {
+ }
+} \ No newline at end of file
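
`reifiedNodeToString` above leans heavily on `Regex.replaceAllIn` with a `Match => String` replacer to post-process the printed tree. A self-contained sketch of that mechanism follows; the input string and the replacement format are made up for the example.

import scala.util.matching.Regex.Match

object RegexRewrite {
  def main(args: Array[String]): Unit = {
    val input = "Modifiers(2L, newTypeName(\"x\"), List())"
    val rewritten = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r
      .replaceAllIn(input, (m: Match) => {
        // Group 1 is the flags literal, group 2 the privateWithin name,
        // group 3 the annotations list (empty here).
        "Modifiers(flags = " + m.group(1) + ", privateWithin = \"" + m.group(2) + "\")"
      })
    println(rewritten) // Modifiers(flags = 2, privateWithin = "x")
  }
}
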
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 2cfd21ecc8..0d19b781e2 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -253,13 +253,11 @@ trait TreeDSL {
}
/** Top level accessible. */
- def MATCHERROR(arg: Tree) = Throw(New(TypeTree(MatchErrorClass.tpe), List(List(arg))))
- /** !!! should generalize null guard from match error here. */
- def THROW(sym: Symbol): Throw = Throw(New(TypeTree(sym.tpe), List(Nil)))
- def THROW(sym: Symbol, msg: Tree): Throw = Throw(New(TypeTree(sym.tpe), List(List(msg.TOSTRING()))))
+ def MATCHERROR(arg: Tree) = Throw(MatchErrorClass.tpe, arg)
+ def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING())
def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList))
- def NEW(sym: Symbol, args: Tree*): Tree = New(sym, args: _*)
+ def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*)
def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp
def DEF(name: Name): DefTreeStart = new DefTreeStart(name)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 265d017653..d7159c5fa8 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -13,7 +13,7 @@ import symtab.SymbolTable
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
* TreeDSL at the moment expects a Global. Can we get by with SymbolTable?
*/
-abstract class TreeGen extends reflect.internal.TreeGen {
+abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
val global: Global
import global._
@@ -30,7 +30,7 @@ abstract class TreeGen extends reflect.internal.TreeGen {
else
tree
}
-
+
/** Builds a fully attributed wildcard import node.
*/
def mkWildcardImport(pkg: Symbol): Import = {
@@ -51,13 +51,12 @@ abstract class TreeGen extends reflect.internal.TreeGen {
}
// wrap the given expression in a SoftReference so it can be gc-ed
- def mkSoftRef(expr: Tree): Tree = atPos(expr.pos) {
- New(SoftReferenceClass, expr)
- }
+ def mkSoftRef(expr: Tree): Tree = atPos(expr.pos)(New(SoftReferenceClass.tpe, expr))
+
// annotate the expression with @unchecked
def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
// This can't be "Annotated(New(UncheckedClass), expr)" because annotations
- // are very pick about things and it crashes the compiler with "unexpected new".
+ // are very picky about things and it crashes the compiler with "unexpected new".
Annotated(New(scalaDot(UncheckedClass.name), List(Nil)), expr)
}
// if it's a Match, mark the selector unchecked; otherwise nothing.
@@ -66,18 +65,81 @@ abstract class TreeGen extends reflect.internal.TreeGen {
case _ => tree
}
- def withDefaultCase(matchExpr: Tree, defaultAction: Tree/*scrutinee*/ => Tree): Tree = matchExpr match {
- case Match(scrutinee, cases) =>
- if (cases exists treeInfo.isDefaultCase) matchExpr
- else {
- val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(scrutinee))
- Match(scrutinee, cases :+ defaultCase)
+ // must be kept in sync with the codegen in PatMatVirtualiser
+ object VirtualCaseDef {
+ def unapply(b: Block): Option[(Assign, Tree, Tree)] = b match {
+ case Block(List(assign@Assign(keepGoingLhs, falseLit), matchRes), zero) => Some((assign, matchRes, zero)) // TODO: check tree annotation
+ case _ => None
+ }
+ }
+
+ // TODO: it would be so much nicer if we knew during match-translation (i.e., type checking)
+ // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum
+ class MatchMatcher {
+ def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig)
+ def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig)
+ def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
+
+ def apply(matchExpr: Tree): Tree = (matchExpr: @unchecked) match {
+ // old-style match or virtpatmat switch
+ case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr )
+ caseMatch(matchExpr, selector, cases, identity)
+ // old-style match or virtpatmat switch
+ case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr )
+ caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m))
+ // virtpatmat
+ case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr )
+ caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher)
+ // optimized version of virtpatmat
+ case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, epilogue) if opt.virtPatmat => // TODO: check tree annotation // println("virtopt match: "+ (zero, x, matchRes, keepGoing, stats) + "for:\n"+ matchExpr )
+ caseVirtualizedMatchOpt(matchExpr, zero, x, matchRes, keepGoing, stats, epilogue, identity)
+ // optimized version of virtpatmat
+ case Block(outerStats, orig@Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, epilogue)) if opt.virtPatmat => // TODO: check tree annotation // println("virt opt block match: "+ (zero, x, matchRes, keepGoing, stats, outerStats) + "for:\n"+ matchExpr )
+ caseVirtualizedMatchOpt(matchExpr, zero, x, matchRes, keepGoing, stats, epilogue, m => copyBlock(matchExpr, outerStats, m))
+ case other =>
+ unknownTree(other)
+ }
+
+ def unknownTree(t: Tree): Tree = throw new MatchError(t)
+ def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr)
+
+ def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] =
+ if (!opt.virtPatmat) cases
+ else cases filter {
+ case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false
+ case CaseDef(pat, guard, body) => true
+ }
+ }
+
+ def withDefaultCase(matchExpr: Tree, defaultAction: Tree/*scrutinee*/ => Tree): Tree = {
+ object withDefaultTransformer extends MatchMatcher {
+ override def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = {
+ val casesNoSynthCatchAll = dropSyntheticCatchAll(cases)
+ if (casesNoSynthCatchAll exists treeInfo.isDefaultCase) orig
+ else {
+ val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
+ wrap(Match(selector, casesNoSynthCatchAll :+ defaultCase))
+ }
+ }
+ override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = { import CODE._
+ ((matcher APPLY (scrut)) DOT nme.getOrElse) APPLY (defaultAction(scrut.duplicate)) // TODO: pass targs
}
- case _ =>
- matchExpr
- // [Martin] Adriaan: please fill in virtpatmat transformation here
+ override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree): Tree = { import CODE._
+ wrap(Block(
+ zero ::
+ x ::
+ matchRes ::
+ keepGoing ::
+ stats,
+ // replace `if (keepGoing) throw new MatchError(...) else matchRes` by `if (keepGoing) ${defaultAction(`x`)} else matchRes`
+ (IF (REF(keepGoing.symbol)) THEN defaultAction(x.rhs.duplicate) ELSE REF(matchRes.symbol))
+ ))
+ }
+ }
+ withDefaultTransformer(matchExpr)
}
+
def mkCached(cvar: Symbol, expr: Tree): Tree = {
val cvarRef = mkUnattributedRef(cvar)
Block(
@@ -98,7 +160,7 @@ abstract class TreeGen extends reflect.internal.TreeGen {
def mkModuleVarDef(accessor: Symbol) = {
val inClass = accessor.owner.isClass
val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0
-
+
val mval = (
accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags)
setInfo accessor.tpe.finalResultType
@@ -118,10 +180,11 @@ abstract class TreeGen extends reflect.internal.TreeGen {
def mkModuleAccessDef(accessor: Symbol, msym: Symbol) =
DefDef(accessor, Select(This(msym.owner), msym))
- def newModule(accessor: Symbol, tpe: Type) =
- New(TypeTree(tpe),
- List(for (pt <- tpe.typeSymbol.primaryConstructor.info.paramTypes)
- yield This(accessor.owner.enclClass)))
+ def newModule(accessor: Symbol, tpe: Type) = {
+ val ps = tpe.typeSymbol.primaryConstructor.info.paramTypes
+ if (ps.isEmpty) New(tpe)
+ else New(tpe, This(accessor.owner.enclClass))
+ }
// def m: T;
def mkModuleAccessDcl(accessor: Symbol) =
@@ -156,6 +219,18 @@ abstract class TreeGen extends reflect.internal.TreeGen {
def mkSynchronized(monitor: Tree, body: Tree): Tree =
Apply(Select(monitor, Object_synchronized), List(body))
+ def mkAppliedTypeForCase(clazz: Symbol): Tree = {
+ val numParams = clazz.typeParams.size
+ if (clazz.typeParams.isEmpty) Ident(clazz)
+ else AppliedTypeTree(Ident(clazz), 1 to numParams map (_ => Bind(tpnme.WILDCARD, EmptyTree)) toList)
+ }
+ def mkBindForCase(patVar: Symbol, clazz: Symbol, targs: List[Type]): Tree = {
+ Bind(patVar, Typed(Ident(nme.WILDCARD),
+ if (targs.isEmpty) mkAppliedTypeForCase(clazz)
+ else AppliedTypeTree(Ident(clazz), targs map TypeTree)
+ ))
+ }
+
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
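
`VirtualCaseDef` above is a custom extractor: an object whose `unapply` recognizes one particular tree shape so that `MatchMatcher` can dispatch on it like any other case. A toy version of the extractor idiom over an invented `Expr` ADT:

sealed trait Expr
case class Num(n: Int) extends Expr
case class Add(l: Expr, r: Expr) extends Expr

// An extractor recognizing the shapes "x + 0" and "0 + x", exposing just x.
object AddZero {
  def unapply(e: Expr): Option[Expr] = e match {
    case Add(x, Num(0)) => Some(x)
    case Add(Num(0), x) => Some(x)
    case _              => None
  }
}

object ExtractorDemo {
  def simplify(e: Expr): Expr = e match {
    case AddZero(x) => simplify(x) // the extractor hides the two concrete shapes
    case Add(l, r)  => Add(simplify(l), simplify(r))
    case other      => other
  }

  def main(args: Array[String]): Unit =
    println(simplify(Add(Num(1), Add(Num(2), Num(0))))) // Add(Num(1),Num(2))
}
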
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
index 5c3071739c..3371353f25 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
@@ -51,14 +51,11 @@ trait TreePrinters extends reflect.internal.TreePrinters { this: Global =>
treePrinter.println()
treePrinter.print(definition)
- case AssignOrNamedArg(lhs, rhs) =>
- treePrinter.print(lhs, " = ", rhs)
-
case TypeTreeWithDeferredRefCheck() =>
treePrinter.print("<tree with deferred refcheck>")
case SelectFromArray(qualifier, name, _) =>
- treePrinter.print(qualifier, ".<arr>", treePrinter.symName(tree, name))
+ treePrinter.print(qualifier, ".<arr>", symName(tree, name))
case _ =>
super.xprintTree(treePrinter, tree)
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 83b6252b26..ad87889145 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -30,12 +30,6 @@ trait Trees extends reflect.internal.Trees { self: Global =>
override def isType = definition.isType
}
- /** Either an assignment or a named argument. Only appears in argument lists,
- * eliminated by typecheck (doTypedApply)
- */
- case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
- extends TermTree
-
/** Array selection <qualifier> . <name> only used during erasure */
case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
extends TermTree with RefTree
@@ -85,16 +79,16 @@ trait Trees extends reflect.internal.Trees { self: Global =>
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
val gvdefs = evdefs map {
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- treeCopy.ValDef(
- vdef.duplicate, mods, name,
- atPos(focusPos(vdef.pos)) { TypeTree() setOriginal tpt setPos focusPos(tpt.pos) }, // atPos in case
- EmptyTree)
- }
- val lvdefs = evdefs map {
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
+ case vdef @ ValDef(_, _, tpt, _) => copyValDef(vdef)(
+ // !!! I know "atPos in case" wasn't intentionally planted to
+ // add an air of mystery to this file, but it is the sort of
+ // comment which only its author could love.
+ tpt = atPos(focusPos(vdef.pos))(TypeTree() setOriginal tpt setPos focusPos(tpt.pos)), // atPos in case
+ rhs = EmptyTree
+ )
}
+ val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = Modifiers(PRESUPER)) }
+
val constrs = {
if (constrMods hasFlag TRAIT) {
if (body forall treeInfo.isInterfaceMember) List()
@@ -114,13 +108,11 @@ trait Trees extends reflect.internal.Trees { self: Global =>
DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
}
}
- // println("typed template, gvdefs = "+gvdefs+", parents = "+parents+", constrs = "+constrs)
constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs))
- // vparamss2 are used as field definitions for the class. remove defaults
- val vparamss2 = vparamss map (vps => vps map { vd =>
- treeCopy.ValDef(vd, vd.mods &~ DEFAULTPARAM, vd.name, vd.tpt, EmptyTree)
- })
- Template(parents, self, gvdefs ::: vparamss2.flatten ::: constrs ::: etdefs ::: rest)
+ // Field definitions for the class - remove defaults.
+ val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
+
+ Template(parents, self, gvdefs ::: fieldDefs ::: constrs ::: etdefs ::: rest)
}
/** Construct class definition with given class symbol, value parameters,
@@ -155,8 +147,6 @@ trait Trees extends reflect.internal.Trees { self: Global =>
traverser.traverseTrees(ts)
case DocDef(comment, definition) =>
traverser.traverse(definition)
- case AssignOrNamedArg(lhs, rhs) =>
- traverser.traverse(lhs); traverser.traverse(rhs)
case SelectFromArray(qualifier, selector, erasure) =>
traverser.traverse(qualifier)
case ReferenceToBoxed(idt) =>
@@ -168,7 +158,6 @@ trait Trees extends reflect.internal.Trees { self: Global =>
trait TreeCopier extends super.TreeCopierOps {
def DocDef(tree: Tree, comment: DocComment, definition: Tree): DocDef
- def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type): SelectFromArray
def ReferenceToBoxed(tree: Tree, idt: Ident): ReferenceToBoxed
def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
@@ -180,8 +169,6 @@ trait Trees extends reflect.internal.Trees { self: Global =>
class StrictTreeCopier extends super.StrictTreeCopier with TreeCopier {
def DocDef(tree: Tree, comment: DocComment, definition: Tree) =
new DocDef(comment, definition).copyAttrs(tree)
- def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) =
- new AssignOrNamedArg(lhs, rhs).copyAttrs(tree)
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type) =
new SelectFromArray(qualifier, selector, erasure).copyAttrs(tree)
def ReferenceToBoxed(tree: Tree, idt: Ident) =
@@ -197,11 +184,6 @@ trait Trees extends reflect.internal.Trees { self: Global =>
if (comment0 == comment) && (definition0 == definition) => t
case _ => this.treeCopy.DocDef(tree, comment, definition)
}
- def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) = tree match {
- case t @ AssignOrNamedArg(lhs0, rhs0)
- if (lhs0 == lhs) && (rhs0 == rhs) => t
- case _ => this.treeCopy.AssignOrNamedArg(tree, lhs, rhs)
- }
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type) = tree match {
case t @ SelectFromArray(qualifier0, selector0, _)
if (qualifier0 == qualifier) && (selector0 == selector) => t
@@ -232,8 +214,6 @@ trait Trees extends reflect.internal.Trees { self: Global =>
override protected def xtransform(transformer: super.Transformer, tree: Tree): Tree = tree match {
case DocDef(comment, definition) =>
transformer.treeCopy.DocDef(tree, comment, transformer.transform(definition))
- case AssignOrNamedArg(lhs, rhs) =>
- transformer.treeCopy.AssignOrNamedArg(tree, transformer.transform(lhs), transformer.transform(rhs))
case SelectFromArray(qualifier, selector, erasure) =>
transformer.treeCopy.SelectFromArray(
tree, transformer.transform(qualifier), selector, erasure)
@@ -269,12 +249,27 @@ trait Trees extends reflect.internal.Trees { self: Global =>
* (bq:) This transformer has mutable state and should be discarded after use
*/
private class ResetAttrs(localOnly: Boolean) {
+ val debug = settings.debug.value
+ val trace = scala.tools.nsc.util.trace when debug
+
val locals = util.HashSet[Symbol](8)
+ val orderedLocals = collection.mutable.ListBuffer[Symbol]()
+ def registerLocal(sym: Symbol) {
+ if (sym != null && sym != NoSymbol) {
+ if (debug && !(locals contains sym)) orderedLocals append sym
+ locals addEntry sym
+ }
+ }
class MarkLocals extends self.Traverser {
- def markLocal(tree: Tree) =
- if (tree.symbol != null && tree.symbol != NoSymbol)
- locals addEntry tree.symbol
+ def markLocal(tree: Tree) {
+ if (tree.symbol != null && tree.symbol != NoSymbol) {
+ val sym = tree.symbol
+ registerLocal(sym)
+ registerLocal(sym.sourceModule)
+ registerLocal(sym.moduleClass)
+ }
+ }
override def traverse(tree: Tree) = {
tree match {
@@ -319,9 +314,12 @@ trait Trees extends reflect.internal.Trees { self: Global =>
def transform[T <: Tree](x: T): T = {
new MarkLocals().traverse(x)
- val trace = scala.tools.nsc.util.trace when settings.debug.value
- val eoln = System.getProperty("line.separator")
- trace("locals (%d total): %n".format(locals.size))(locals.toList map {" " + _} mkString eoln)
+ if (debug) {
+ assert(locals.size == orderedLocals.size)
+ val eoln = System.getProperty("line.separator")
+ val msg = orderedLocals.toList filter {_ != NoSymbol} map {" " + _} mkString eoln
+ trace("locals (%d total): %n".format(orderedLocals.size))(msg)
+ }
val x1 = new Transformer().transform(x)
assert(x.getClass isInstance x1)
@@ -333,7 +331,6 @@ trait Trees extends reflect.internal.Trees { self: Global =>
case Parens(expr) (only used during parsing)
case DocDef(comment, defn) => (eliminated by typer)
- case AssignOrNamedArg(lhs, rhs) => (eliminated by typer)
case TypeTreeWithDeferredRefCheck() => (created and eliminated by typer)
case SelectFromArray(_, _, _) => (created and eliminated by erasure)
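
Several hunks in this file replace positional `treeCopy.ValDef(vdef, mods, name, tpt, rhs)` calls with `copyValDef(vdef)(mods = ...)`, a copier whose named parameters default to the original's fields, so callers state only what changes. A sketch of that shape over an invented `ValDefLite` class:

case class ValDefLite(mods: Long, name: String, tpt: String, rhs: String)

object Copiers {
  // Named parameters defaulting to the original's fields: callers change
  // only the fields that differ from the source definition.
  def copyValDefLite(vd: ValDefLite)(
      mods: Long   = vd.mods,
      name: String = vd.name,
      tpt: String  = vd.tpt,
      rhs: String  = vd.rhs): ValDefLite =
    ValDefLite(mods, name, tpt, rhs)

  def main(args: Array[String]): Unit = {
    val vd = ValDefLite(mods = 0L, name = "x", tpt = "Int", rhs = "42")
    println(copyValDefLite(vd)(rhs = "<empty>")) // only rhs changes
  }
}
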
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index c2b4dc32b6..cd19fca0b0 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -658,7 +658,8 @@ self =>
DocDef(doc, t) setPos {
if (t.pos.isDefined) {
val pos = doc.pos.withEnd(t.pos.endOrPoint)
- if (t.pos.isOpaqueRange) pos else pos.makeTransparent
+ // always make the position transparent
+ pos.makeTransparent
} else {
t.pos
}
@@ -1205,7 +1206,7 @@ self =>
*/
def wildcardType(start: Int) = {
val pname = freshTypeName("_$")
- val t = atPos(start) { Ident(pname) }
+ val t = atPos(start)(Ident(pname))
val bounds = typeBounds()
val param = atPos(t.pos union bounds.pos) { makeSyntheticTypeParam(pname, bounds) }
placeholderTypes = param :: placeholderTypes
@@ -1423,15 +1424,14 @@ self =>
def implicitClosure(start: Int, location: Int): Tree = {
val param0 = convertToParam {
atPos(in.offset) {
- var paramexpr: Tree = Ident(ident())
- if (in.token == COLON) {
- in.nextToken()
- paramexpr = Typed(paramexpr, typeOrInfixType(location))
+ Ident(ident()) match {
+ case expr if in.token == COLON =>
+ in.nextToken() ; Typed(expr, typeOrInfixType(location))
+ case expr => expr
}
- paramexpr
}
}
- val param = treeCopy.ValDef(param0, param0.mods | Flags.IMPLICIT, param0.name, param0.tpt, param0.rhs)
+ val param = copyValDef(param0)(mods = param0.mods | Flags.IMPLICIT)
atPos(start, in.offset) {
accept(ARROW)
Function(List(param), if (location != InBlock) expr() else block())
@@ -2689,8 +2689,8 @@ self =>
val (self, body) = templateBody(true)
if (in.token == WITH && self.isEmpty) {
val earlyDefs: List[Tree] = body flatMap {
- case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isDeferred =>
- List(treeCopy.ValDef(vdef, mods | Flags.PRESUPER, name, tpt, rhs))
+ case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
+ List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
case tdef @ TypeDef(mods, name, tparams, rhs) =>
List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
case stat if !stat.isEmpty =>
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index dae264fffe..2895d02dfe 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -84,7 +84,7 @@ trait Scanners extends ScannersCommon {
abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
private def isDigit(c: Char) = java.lang.Character isDigit c
-
+
def isAtEnd = charOffset >= buf.length
def flush = { charOffset = offset; nextChar(); this }
@@ -164,7 +164,7 @@ trait Scanners extends ScannersCommon {
* RBRACE if region starts with '{'
* ARROW if region starts with `case'
* STRINGLIT if region is a string interpolation expression starting with '${'
- * (the STRINGLIT appears twice in succession on the stack iff the
+ * (the STRINGLIT appears twice in succession on the stack iff the
* expression is a multiline string literal).
*/
var sepRegions: List[Int] = List()
@@ -173,15 +173,15 @@ trait Scanners extends ScannersCommon {
/** Are we directly in a string interpolation expression?
*/
- @inline private def inStringInterpolation =
+ @inline private def inStringInterpolation =
sepRegions.nonEmpty && sepRegions.head == STRINGLIT
-
+
/** Are we directly in a multiline string interpolation expression?
* @pre: inStringInterpolation
*/
- @inline private def inMultiLineInterpolation =
- sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
-
+ @inline private def inMultiLineInterpolation =
+ inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
+
/** read next token and return last offset
*/
def skipToken(): Offset = {
@@ -205,7 +205,7 @@ trait Scanners extends ScannersCommon {
case CASE =>
sepRegions = ARROW :: sepRegions
case RBRACE =>
- while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
+ while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
docBuffer = null
@@ -217,11 +217,13 @@ trait Scanners extends ScannersCommon {
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
case STRINGLIT =>
- if (inStringInterpolation)
+ if (inMultiLineInterpolation)
+ sepRegions = sepRegions.tail.tail
+ else if (inStringInterpolation)
sepRegions = sepRegions.tail
case _ =>
}
-
+
// Read a token or copy it from `next` tokenData
if (next.token == EMPTY) {
lastOffset = charOffset - 1
@@ -325,8 +327,8 @@ trait Scanners extends ScannersCommon {
'z' =>
putChar(ch)
nextChar()
- getIdentRest()
- if (ch == '"' && token == IDENTIFIER && settings.Xexperimental.value)
+ getIdentRest()
+ if (ch == '"' && token == IDENTIFIER && settings.Xexperimental.value)
token = INTERPOLATIONID
case '<' => // is XMLSTART?
val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
@@ -386,7 +388,7 @@ trait Scanners extends ScannersCommon {
if (ch == '\"') {
nextRawChar()
getStringPart(multiLine = true)
- sepRegions = STRINGLIT :: sepRegions // indicate string part
+ sepRegions = STRINGPART :: sepRegions // indicate string part
sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
} else {
token = STRINGLIT
@@ -407,7 +409,7 @@ trait Scanners extends ScannersCommon {
token = STRINGLIT
strVal = ""
}
- } else {
+ } else {
getStringLit()
}
}
@@ -630,8 +632,8 @@ trait Scanners extends ScannersCommon {
else finishNamed()
}
}
-
-
+
+
// Literals -----------------------------------------------------------------
private def getStringLit() = {
@@ -659,21 +661,27 @@ trait Scanners extends ScannersCommon {
getRawStringLit()
}
}
-
+
@annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
def finishStringPart() = {
setStrVal()
token = STRINGPART
next.lastOffset = charOffset - 1
next.offset = charOffset - 1
- }
+ }
if (ch == '"') {
- nextRawChar()
- if (!multiLine || isTripleQuote()) {
+ if (multiLine) {
+ nextRawChar()
+ if (isTripleQuote()) {
+ setStrVal()
+ token = STRINGLIT
+ } else
+ getStringPart(multiLine)
+ } else {
+ nextChar()
setStrVal()
token = STRINGLIT
- } else
- getStringPart(multiLine)
+ }
} else if (ch == '$') {
nextRawChar()
if (ch == '$') {
@@ -696,20 +704,23 @@ trait Scanners extends ScannersCommon {
} else {
syntaxError("invalid string interpolation")
}
- } else if ((ch == CR || ch == LF || ch == SU) && !isUnicodeEscape) {
- syntaxError("unclosed string literal")
} else {
- putChar(ch)
- nextRawChar()
- getStringPart(multiLine)
+ val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF)))
+ if (isUnclosedLiteral) {
+ syntaxError(if (!multiLine) "unclosed string literal" else "unclosed multi-line string literal")
+ } else {
+ putChar(ch)
+ nextRawChar()
+ getStringPart(multiLine)
+ }
}
}
-
+
private def fetchStringPart() = {
offset = charOffset - 1
getStringPart(multiLine = inMultiLineInterpolation)
}
-
+
private def isTripleQuote(): Boolean =
if (ch == '"') {
nextRawChar()
@@ -730,7 +741,7 @@ trait Scanners extends ScannersCommon {
false
}
- /** copy current character into cbuf, interpreting any escape sequences,
+ /** copy current character into cbuf, interpreting any escape sequences,
* and advance to next character.
*/
protected def getLitChar(): Unit =
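
The scanner changes above hinge on the `sepRegions` stack: a multi-line interpolated string pushes STRINGPART and then STRINGLIT (the corrected order), and closing the literal pops both. A toy model of that bookkeeping, with invented token codes standing in for the real ones:

object SepRegionsDemo {
  val STRINGLIT  = 1
  val STRINGPART = 2

  var sepRegions: List[Int] = Nil

  def openMultiLineInterpolation(): Unit = {
    sepRegions = STRINGPART :: sepRegions // indicate string part
    sepRegions = STRINGLIT :: sepRegions  // once more, to mark it multi-line
  }

  def inStringInterpolation = sepRegions.nonEmpty && sepRegions.head == STRINGLIT
  def inMultiLineInterpolation =
    inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART

  // On a closing STRINGLIT token, pop two regions for a multi-line literal, one otherwise.
  def closeStringLit(): Unit =
    if (inMultiLineInterpolation) sepRegions = sepRegions.tail.tail
    else if (inStringInterpolation) sepRegions = sepRegions.tail

  def main(args: Array[String]): Unit = {
    openMultiLineInterpolation()
    println(inMultiLineInterpolation) // true
    closeStringLit()
    println(sepRegions.isEmpty)       // true
  }
}
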
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index 091f333c27..fb4daefd57 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -58,7 +58,7 @@ object Tokens extends Tokens {
final val BACKQUOTED_IDENT = 11
def isIdentifier(code: Int) =
code >= IDENTIFIER && code <= BACKQUOTED_IDENT
-
+
@switch def canBeginExpression(code: Int) = code match {
case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
case LBRACE|LPAREN|LBRACKET|COMMENT => true
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 40389466e2..0d2fbc5372 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -471,15 +471,11 @@ abstract class TreeBuilder {
def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean): Tree =
makeVisitor(cases, checkExhaustive, "x$")
- private def makeUnchecked(expr: Tree): Tree = atPos(expr.pos) {
- Annotated(New(scalaDot(definitions.UncheckedClass.name), List(Nil)), expr)
- }
-
/** Create visitor <x => x match cases> */
def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean, prefix: String): Tree = {
- val x = freshTermName(prefix)
- val id = Ident(x)
- val sel = if (checkExhaustive) id else makeUnchecked(id)
+ val x = freshTermName(prefix)
+ val id = Ident(x)
+ val sel = if (checkExhaustive) id else gen.mkUnchecked(id)
Function(List(makeSyntheticParam(x)), Match(sel, cases))
}
@@ -564,7 +560,7 @@ abstract class TreeBuilder {
val vars = getVariables(pat1)
val matchExpr = atPos((pat1.pos union rhs.pos).makeTransparent) {
Match(
- makeUnchecked(rhs),
+ gen.mkUnchecked(rhs),
List(
atPos(pat1.pos) {
CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident, true))
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 05571b2424..aab944f65a 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -565,7 +565,7 @@ abstract class ScalaPrimitives {
import definitions._
val code = getPrimitive(fun)
- def elementType = atPhase(currentRun.typerPhase) {
+ def elementType = beforeTyper {
val arrayParent = tpe :: tpe.parents collectFirst {
case TypeRef(_, ArrayClass, elem :: Nil) => elem
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 4ab0eb0129..68c4ac03f6 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -18,7 +18,7 @@ trait BasicBlocks {
import opcodes._
import global.{ ifDebug, settings, log, nme }
import nme.isExceptionResultName
-
+
object NoBasicBlock extends BasicBlock(-1, null)
/** This class represents a basic block. Each
@@ -182,7 +182,7 @@ trait BasicBlocks {
final def foreach[U](f: Instruction => U) = {
if (!closed) dumpMethodAndAbort(method, this)
else instrs foreach f
-
+
// !!! If I replace "instrs foreach f" with the following:
// var i = 0
// val len = instrs.length
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 3baff7da9e..9e801e3ea8 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -94,7 +94,7 @@ abstract class GenICode extends SubComponent {
// !! modules should be eliminated by refcheck... or not?
case ModuleDef(mods, name, impl) =>
- abort("Modules should not reach backend!")
+ abort("Modules should not reach backend! " + tree)
case ValDef(mods, name, tpt, rhs) =>
ctx // we use the symbol to add fields
@@ -133,7 +133,7 @@ abstract class GenICode extends SubComponent {
if (!ctx1.bb.closed) ctx1.bb.close
prune(ctx1.method)
} else
- ctx1.method.setCode(null)
+ ctx1.method.setCode(NoCode)
ctx1
case Template(_, _, body) =>
@@ -393,15 +393,15 @@ abstract class GenICode extends SubComponent {
for (CaseDef(pat, _, body) <- catches.reverse) yield {
def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
(sym, kind, ctx => {
- ctx.bb.emit(DROP(REFERENCE(sym)))
+ ctx.bb.emit(DROP(REFERENCE(sym))) // drop the loaded exception
genLoad(body, ctx, kind)
})
pat match {
case Typed(Ident(nme.WILDCARD), tpt) => genWildcardHandler(tpt.tpe.typeSymbol)
case Ident(nme.WILDCARD) => genWildcardHandler(ThrowableClass)
- case Bind(name, _) =>
- val exception = ctx.method addLocal new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false)
+ case Bind(_, _) =>
+ val exception = ctx.method addLocal new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false) // the exception will be loaded and stored into this local
(pat.symbol.tpe.typeSymbol, kind, {
ctx: Context =>
@@ -704,7 +704,8 @@ abstract class GenICode extends SubComponent {
ctx1
case New(tpt) =>
- abort("Unexpected New")
+ abort("Unexpected New(" + tpt.summaryString + "/" + tpt + ") received in icode.\n" +
+ " Call was genLoad" + ((tree, ctx, expectedType)))
case Apply(TypeApply(fun, targs), _) =>
val sym = fun.symbol
@@ -1054,7 +1055,7 @@ abstract class GenICode extends SubComponent {
case Match(selector, cases) =>
debuglog("Generating SWITCH statement.");
- var ctx1 = genLoad(selector, ctx, INT)
+ var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
val afterCtx = ctx1.newBlock
var caseCtx: Context = null
generatedType = toTypeKind(tree.tpe)
@@ -2086,12 +2087,12 @@ abstract class GenICode extends SubComponent {
exh
}) else None
- val exhs = handlers.map { handler =>
- val exh = this.newExceptionHandler(handler._1, handler._2, tree.pos)
+ val exhs = handlers.map { case (sym, kind, handler) => // def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
+ val exh = this.newExceptionHandler(sym, kind, tree.pos)
var ctx1 = outerCtx.enterExceptionHandler(exh)
ctx1.addFinalizer(finalizer, finalizerCtx)
loadException(ctx1, exh, tree.pos)
- ctx1 = handler._3(ctx1)
+ ctx1 = handler(ctx1)
// emit finalizer
val ctx2 = emitFinalizer(ctx1)
ctx2.bb.closeWith(JUMP(afterCtx.bb))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 2668e7f29f..36651541b2 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -21,7 +21,7 @@ trait Members {
self: ICodes =>
import global._
-
+
object NoCode extends Code(null, "NoCode") {
override def blocksList: List[BasicBlock] = Nil
}
@@ -138,7 +138,7 @@ trait Members {
/** Represent a field in ICode */
class IField(val symbol: Symbol) extends IMember { }
-
+
object NoIMethod extends IMethod(NoSymbol) { }
/**
@@ -177,19 +177,13 @@ trait Members {
/** method parameters */
var params: List[Local] = Nil
- // TODO - see how null is stil arriving here
- def hasCode = (code ne NoCode) && (code ne null)
+ def hasCode = code ne NoCode
def setCode(code: Code): IMethod = {
this.code = code;
this
}
- def addLocal(l: Local): Local =
- locals find (_ == l) getOrElse {
- locals ::= l
- l
- }
-
+ def addLocal(l: Local): Local = findOrElse(locals)(_ == l) { locals ::= l ; l }
def addParam(p: Local): Unit =
if (params contains p) ()
@@ -214,6 +208,12 @@ trait Members {
override def toString() = symbol.fullName
+ def matchesSignature(other: IMethod) = {
+ (symbol.name == other.symbol.name) &&
+ (params corresponds other.params)(_.kind == _.kind) &&
+ (returnType == other.returnType)
+ }
+
import opcodes._
def checkLocals(): Unit = {
def localsSet = (code.blocks flatMap { bb =>
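
The new `matchesSignature` compares parameter lists with `corresponds`, which holds exactly when both sequences have the same length and the predicate holds for each aligned pair. For instance, with an invented `Param` type:

object CorrespondsDemo {
  case class Param(kind: String)

  def sameKinds(a: List[Param], b: List[Param]): Boolean =
    (a corresponds b)(_.kind == _.kind)

  def main(args: Array[String]): Unit = {
    val xs = List(Param("INT"), Param("REF"))
    println(sameKinds(xs, List(Param("INT"), Param("REF"))))   // true
    println(sameKinds(xs, List(Param("INT"))))                 // false: lengths differ
    println(sameKinds(xs, List(Param("INT"), Param("LONG"))))  // false: kinds differ
  }
}
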
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 2bcfb9d4a9..ec6c631bd1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -350,6 +350,7 @@ trait Opcodes { self: ICodes =>
}
case class BOX(boxType: TypeKind) extends Instruction {
+ assert(boxType.isValueType && (boxType ne UNIT)) // documentation
override def toString(): String = "BOX " + boxType
override def consumed = 1
override def consumedTypes = boxType :: Nil
@@ -357,6 +358,7 @@ trait Opcodes { self: ICodes =>
}
case class UNBOX(boxType: TypeKind) extends Instruction {
+ assert(boxType.isValueType && (boxType ne UNIT)) // documentation
override def toString(): String = "UNBOX " + boxType
override def consumed = 1
override def consumedTypes = ObjectReference :: Nil
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index 37fff0e1e8..f99ac28e9d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -120,47 +120,50 @@ trait Primitives { self: ICodes =>
/** This class represents a test operation. */
- class TestOp {
+ sealed abstract class TestOp {
/** Returns the negation of this operation. */
- def negate(): TestOp = this match {
- case EQ => NE
- case NE => EQ
- case LT => GE
- case GE => LT
- case LE => GT
- case GT => LE
- case _ => throw new RuntimeException("TestOp unknown case")
- }
+ def negate(): TestOp
/** Returns a string representation of this operation. */
- override def toString(): String = this match {
- case EQ => "EQ"
- case NE => "NE"
- case LT => "LT"
- case GE => "GE"
- case LE => "LE"
- case GT => "GT"
- case _ => throw new RuntimeException("TestOp unknown case")
- }
+ override def toString(): String
}
+
/** An equality test */
- case object EQ extends TestOp
+ case object EQ extends TestOp {
+ def negate() = NE
+ override def toString() = "EQ"
+ }
/** A non-equality test */
- case object NE extends TestOp
+ case object NE extends TestOp {
+ def negate() = EQ
+ override def toString() = "NE"
+ }
/** A less-than test */
- case object LT extends TestOp
+ case object LT extends TestOp {
+ def negate() = GE
+ override def toString() = "LT"
+ }
/** A greater-than-or-equal test */
- case object GE extends TestOp
+ case object GE extends TestOp {
+ def negate() = LT
+ override def toString() = "GE"
+ }
/** A less-than-or-equal test */
- case object LE extends TestOp
+ case object LE extends TestOp {
+ def negate() = GT
+ override def toString() = "LE"
+ }
/** A greater-than test */
- case object GT extends TestOp
+ case object GT extends TestOp {
+ def negate() = LE
+ override def toString() = "GT"
+ }
/** This class represents an arithmetic operation. */
class ArithmeticOp {
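
The `TestOp` rewrite above trades one centralized `match` (with an "unknown case" catch-all throwing at runtime) for a sealed hierarchy where every case object implements its own `negate()` and `toString()`, letting the compiler check exhaustiveness instead. The same design in miniature, with invented names:

sealed abstract class Cmp {
  def negate: Cmp
}
case object Eq extends Cmp { def negate = Ne }
case object Ne extends Cmp { def negate = Eq }
case object Lt extends Cmp { def negate = Ge }
case object Ge extends Cmp { def negate = Lt }

object SealedDemo {
  // Because Cmp is sealed, the compiler verifies this match is exhaustive,
  // so no runtime "unknown case" branch is needed.
  def describe(c: Cmp): String = c match {
    case Eq => "equal"
    case Ne => "not equal"
    case Lt => "less than"
    case Ge => "at least"
  }

  def main(args: Array[String]): Unit =
    println(describe(Lt.negate)) // at least
}
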
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index a485272ca6..2ff0c1926c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -74,22 +74,19 @@ trait TypeKinds { self: ICodes =>
case _ => false
}
- /** On the JVM, these types are like Ints for the
- * purposes of calculating the lub.
+ /** On the JVM,
+ * BOOL, BYTE, CHAR, SHORT, and INT
+ * are like Ints for the purposes of calculating the lub.
*/
- def isIntSizedType: Boolean = this match {
- case BOOL | CHAR | BYTE | SHORT | INT => true
- case _ => false
- }
- def isIntegralType: Boolean = this match {
- case BYTE | SHORT | INT | LONG | CHAR => true
- case _ => false
- }
- def isRealType: Boolean = this match {
- case FLOAT | DOUBLE => true
- case _ => false
- }
- def isNumericType: Boolean = isIntegralType | isRealType
+ def isIntSizedType: Boolean = false
+
+ /** On the JVM, similar to isIntSizedType except that BOOL isn't integral while LONG is. */
+ def isIntegralType: Boolean = false
+
+ /** On the JVM, FLOAT and DOUBLE. */
+ def isRealType: Boolean = false
+
+ final def isNumericType: Boolean = isIntegralType | isRealType
/** Simple subtyping check */
def <:<(other: TypeKind): Boolean = (this eq other) || (this match {
@@ -97,11 +94,8 @@ trait TypeKinds { self: ICodes =>
case _ => this eq other
})
- /** Is this type a category 2 type in JVM terms? */
- def isWideType: Boolean = this match {
- case DOUBLE | LONG => true
- case _ => false
- }
+ /** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */
+ def isWideType: Boolean = false
/** The number of dimensions for array types. */
def dimensions: Int = 0
@@ -145,7 +139,7 @@ trait TypeKinds { self: ICodes =>
* Here we make the adjustment by rewinding to a pre-erasure state and
* sifting through the parents for a class type.
*/
- def lub0(tk1: TypeKind, tk2: TypeKind): Type = atPhase(currentRun.uncurryPhase) {
+ def lub0(tk1: TypeKind, tk2: TypeKind): Type = beforeUncurry {
import definitions._
val tp = global.lub(List(tk1.toType, tk2.toType))
val (front, rest) = tp.parents span (_.typeSymbol.hasTraitFlag)
@@ -182,6 +176,7 @@ trait TypeKinds { self: ICodes =>
/** A boolean value */
case object BOOL extends ValueTypeKind {
+ override def isIntSizedType = true
def maxType(other: TypeKind) = other match {
case BOOL | REFERENCE(NothingClass) => BOOL
case _ => uncomparable(other)
@@ -195,6 +190,8 @@ trait TypeKinds { self: ICodes =>
/** A 1-byte signed integer */
case object BYTE extends ValueTypeKind {
+ override def isIntSizedType = true
+ override def isIntegralType = true
def maxType(other: TypeKind) = {
if (other == BYTE || other.isNothingType) BYTE
else if (other == CHAR) INT
@@ -205,6 +202,8 @@ trait TypeKinds { self: ICodes =>
/** A 2-byte signed integer */
case object SHORT extends ValueTypeKind {
+ override def isIntSizedType = true
+ override def isIntegralType = true
override def maxType(other: TypeKind) = other match {
case BYTE | SHORT | REFERENCE(NothingClass) => SHORT
case CHAR => INT
@@ -215,6 +214,8 @@ trait TypeKinds { self: ICodes =>
/** A 2-byte UNSIGNED integer */
case object CHAR extends ValueTypeKind {
+ override def isIntSizedType = true
+ override def isIntegralType = true
override def maxType(other: TypeKind) = other match {
case CHAR | REFERENCE(NothingClass) => CHAR
case BYTE | SHORT => INT
@@ -225,6 +226,8 @@ trait TypeKinds { self: ICodes =>
/** A 4-byte signed integer */
case object INT extends ValueTypeKind {
+ override def isIntSizedType = true
+ override def isIntegralType = true
override def maxType(other: TypeKind) = other match {
case BYTE | SHORT | CHAR | INT | REFERENCE(NothingClass) => INT
case LONG | FLOAT | DOUBLE => other
@@ -234,6 +237,8 @@ trait TypeKinds { self: ICodes =>
/** An 8-byte signed integer */
case object LONG extends ValueTypeKind {
+ override def isIntegralType = true
+ override def isWideType = true
override def maxType(other: TypeKind): TypeKind =
if (other.isIntegralType || other.isNothingType) LONG
else if (other.isRealType) DOUBLE
@@ -242,6 +247,7 @@ trait TypeKinds { self: ICodes =>
/** A 4-byte floating point number */
case object FLOAT extends ValueTypeKind {
+ override def isRealType = true
override def maxType(other: TypeKind): TypeKind =
if (other == DOUBLE) DOUBLE
else if (other.isNumericType || other.isNothingType) FLOAT
@@ -250,6 +256,8 @@ trait TypeKinds { self: ICodes =>
/** An 8-byte floating point number */
case object DOUBLE extends ValueTypeKind {
+ override def isRealType = true
+ override def isWideType = true
override def maxType(other: TypeKind): TypeKind =
if (other.isNumericType || other.isNothingType) DOUBLE
else uncomparable(other)
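
The `TypeKinds` refactor replaces per-predicate pattern matches with overridable defaults: the base declares each predicate false and every kind overrides only the ones that apply to it. Reduced to a sketch with invented kinds:

sealed abstract class Kind {
  def isIntSized: Boolean = false
  def isIntegral: Boolean = false
  def isReal: Boolean     = false
  final def isNumeric: Boolean = isIntegral || isReal
}
case object BoolK extends Kind { override def isIntSized = true }
case object IntK  extends Kind { override def isIntSized = true; override def isIntegral = true }
case object LongK extends Kind { override def isIntegral = true }
case object DblK  extends Kind { override def isReal = true }

object KindDemo {
  def main(args: Array[String]): Unit = {
    println(BoolK.isNumeric) // false: int-sized but not integral
    println(LongK.isNumeric) // true
    println(DblK.isNumeric)  // true
  }
}
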
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index ba4b250303..8a2ec9a191 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -21,7 +21,7 @@ trait TypeStacks {
* stack of the ICode.
*/
type Rep = List[TypeKind]
-
+
object NoTypeStack extends TypeStack(Nil) { }
class TypeStack(var types: Rep) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 60cb679782..9f43e1b84c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -60,20 +60,17 @@ trait DataFlowAnalysis[L <: SemiLattice] {
val output = f(point, in(point))
if ((lattice.bottom == out(point)) || output != out(point)) {
-// Console.println("Output changed at " + point
-// + " from: " + out(point) + " to: " + output
-// + " for input: " + in(point) + " and they are different: " + (output != out(point)))
+ // Console.println("Output changed at " + point
+ // + " from: " + out(point) + " to: " + output
+ // + " for input: " + in(point) + " and they are different: " + (output != out(point)))
out(point) = output
val succs = point.successors
succs foreach { p =>
- if (!worklist(p))
- worklist += p;
- if (!in.isDefinedAt(p))
- assert(false, "Invalid successor for: " + point + " successor " + p + " does not exist")
-// if (!p.exceptionHandlerHeader) {
-// println("lubbing " + p.predecessors + " outs: " + p.predecessors.map(out.apply).mkString("\n", "\n", ""))
- in(p) = lattice.lub(in(p) :: (p.predecessors map out.apply), p.exceptionHandlerStart)
-// }
+ val updated = lattice.lub(in(p) :: (p.predecessors map out.apply), p.exceptionHandlerStart)
+ if(updated != in(p)) {
+ in(p) = updated
+ if (!worklist(p)) { worklist += p; }
+ }
}
}
}
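
The `DataFlowAnalysis` fix above updates a successor's input and re-enqueues it only when the lub actually changed, which is what lets the worklist drain once a fixpoint is reached. A tiny generic version of that loop, computing reachability over a Boolean lattice (all names invented):

import scala.collection.mutable

object WorklistDemo {
  // Which nodes are reachable from `start`? The lattice is Boolean with join = ||,
  // and a successor is re-enqueued only when its value changed.
  def reachable(succs: Map[Int, List[Int]], start: Int): Set[Int] = {
    val in = mutable.Map.empty[Int, Boolean].withDefaultValue(false)
    val worklist = mutable.Queue(start)
    in(start) = true
    while (worklist.nonEmpty) {
      val n = worklist.dequeue()
      val out = in(n)                    // identity transfer function
      for (p <- succs.getOrElse(n, Nil)) {
        val updated = in(p) || out       // lub on the Boolean lattice
        if (updated != in(p)) {          // the fix: only re-enqueue on change
          in(p) = updated
          worklist += p
        }
      }
    }
    in.collect { case (k, true) => k }.toSet
  }

  def main(args: Array[String]): Unit =
    // The 1 -> 2 -> 3 -> 1 cycle terminates because values stop changing.
    println(reachable(Map(1 -> List(2), 2 -> List(3), 3 -> List(1), 4 -> List(1)), 1))
  // prints the reachable set {1, 2, 3} (element order may vary)
}
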
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index c06bd2e097..69de0dfa90 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -105,11 +105,9 @@ abstract class ReachingDefinitions {
def genAndKill(b: BasicBlock): (ListSet[Definition], ListSet[Local]) = {
var genSet = ListSet[Definition]()
var killSet = ListSet[Local]()
- for ((i, idx) <- b.toList.zipWithIndex) i match {
- case STORE_LOCAL(local) =>
- killSet = killSet + local
- genSet = updateReachingDefinition(b, idx, genSet)
- case _ => ()
+ for ((STORE_LOCAL(local), idx) <- b.toList.zipWithIndex) {
+ killSet = killSet + local
+ genSet = updateReachingDefinition(b, idx, genSet)
}
(genSet, killSet)
}
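
The `genAndKill` rewrite exploits patterns in for-generators: tuples whose first component doesn't match `STORE_LOCAL(_)` are silently filtered out, subsuming the old `case _ => ()` branch. The same mechanism on an invented instruction type:

sealed trait Instr
case class StoreLocal(name: String) extends Instr
case class LoadLocal(name: String) extends Instr

object ForPatternDemo {
  def main(args: Array[String]): Unit = {
    val instrs = List(LoadLocal("a"), StoreLocal("x"), LoadLocal("b"), StoreLocal("y"))
    // Only the StoreLocal elements (with their indices) survive the generator pattern.
    for ((StoreLocal(name), idx) <- instrs.zipWithIndex)
      println(s"kill $name at $idx")
    // prints: kill x at 1, then kill y at 3
  }
}
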
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 6421d6c8ef..877c51ebc1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -127,34 +127,6 @@ abstract class TypeFlowAnalysis {
}
}
- /** reinitialize the analysis, keeping around solutions from a previous run. */
- def reinit(m: icodes.IMethod) {
- if (this.method == null || this.method.symbol != m.symbol)
- init(m)
- else reinit {
- m foreachBlock { b =>
- if (!in.contains(b)) {
- for (p <- b.predecessors) {
- if (out.isDefinedAt(p)) {
- in(b) = out(p)
- worklist += p
- }
- /* else
- in(b) = typeFlowLattice.bottom
- */ }
- out(b) = typeFlowLattice.bottom
- }
- }
- for (handler <- m.exh) {
- val start = handler.startBlock
- if (!in.contains(start)) {
- worklist += start
- in(start) = lattice.IState(in(start).vars, typeStackLattice.exceptionHandlerStack)
- }
- }
- }
- }
-
def this(m: icodes.IMethod) {
this()
init(m)
@@ -162,7 +134,7 @@ abstract class TypeFlowAnalysis {
def run = {
timer.start
-// icodes.lubs0 = 0
+ // icodes.lubs0 = 0
forwardAnalysis(blockTransfer)
val t = timer.stop
if (settings.debug.value) {
@@ -170,216 +142,35 @@ abstract class TypeFlowAnalysis {
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
}
-// log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
-// + "\n\t" + iterations + " iterations: " + t + " ms."
-// + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
+ // log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
+ // + "\n\t" + iterations + " iterations: " + t + " ms."
+ // + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- b.iterator.foldLeft(in)(interpret)
- }
- /** The flow function of a given basic block. */
- /* var flowFun: immutable.Map[BasicBlock, TransferFunction] = new immutable.HashMap */
-
- /** Fill flowFun with a transfer function per basic block. */
-/*
- private def buildFlowFunctions(blocks: List[BasicBlock]) {
- def transfer(b: BasicBlock): TransferFunction = {
- var gens: List[Gen] = Nil
- var consumed: Int = 0
- val stack = new SimulatedStack
-
- for (instr <- b) instr match {
- case THIS(clasz) =>
- stack push toTypeKind(clasz.tpe)
-
- case CONSTANT(const) =>
- stack push toTypeKind(const.tpe)
-
- case LOAD_ARRAY_ITEM(kind) =>
- stack.pop2
- stack.push(kind)
-
- case LOAD_LOCAL(local) =>
- val t = bindings(local)
- stack push (if (t == typeLattice.bottom) local.kind else t)
-
- case LOAD_FIELD(field, isStatic) =>
- if (!isStatic)
- stack.pop
- stack push toTypeKind(field.tpe)
-
- case LOAD_MODULE(module) =>
- stack push toTypeKind(module.tpe)
-
- case STORE_ARRAY_ITEM(kind) =>
- stack.pop3
-
- case STORE_LOCAL(local) =>
- val t = stack.pop
- bindings += (local -> t)
-
- case STORE_THIS(_) =>
- stack.pop
-
- case STORE_FIELD(field, isStatic) =>
- if (isStatic)
- stack.pop
- else
- stack.pop2
-
- case CALL_PRIMITIVE(primitive) =>
- primitive match {
- case Negation(kind) =>
- stack.pop; stack.push(kind)
- case Test(_, kind, zero) =>
- stack.pop
- if (!zero) stack.pop
- stack push BOOL;
- case Comparison(_, _) =>
- stack.pop2
- stack push INT
-
- case Arithmetic(op, kind) =>
- stack.pop
- if (op != NOT)
- stack.pop
- val k = kind match {
- case BYTE | SHORT | CHAR => INT
- case _ => kind
- }
- stack push k
-
- case Logical(op, kind) =>
- stack.pop2
- stack push kind
-
- case Shift(op, kind) =>
- stack.pop2
- stack push kind
-
- case Conversion(src, dst) =>
- stack.pop
- stack push dst
-
- case ArrayLength(kind) =>
- stack.pop
- stack push INT
-
- case StartConcat =>
- stack.push(ConcatClass)
-
- case EndConcat =>
- stack.pop
- stack.push(STRING)
-
- case StringConcat(el) =>
- stack.pop2
- stack push ConcatClass
- }
-
- case CALL_METHOD(method, style) => style match {
- case Dynamic =>
- stack.pop(1 + method.info.paramTypes.length)
- stack.push(toTypeKind(method.info.resultType))
-
- case Static(onInstance) =>
- if (onInstance) {
- stack.pop(1 + method.info.paramTypes.length)
- if (!method.isConstructor)
- stack.push(toTypeKind(method.info.resultType));
- } else {
- stack.pop(method.info.paramTypes.length)
- stack.push(toTypeKind(method.info.resultType))
- }
-
- case SuperCall(mix) =>
- stack.pop(1 + method.info.paramTypes.length)
- stack.push(toTypeKind(method.info.resultType))
- }
-
- case BOX(kind) =>
- stack.pop
- stack.push(BOXED(kind))
-
- case UNBOX(kind) =>
- stack.pop
- stack.push(kind)
-
- case NEW(kind) =>
- stack.push(kind)
-
- case CREATE_ARRAY(elem, dims) =>
- stack.pop(dims)
- stack.push(ARRAY(elem))
-
- case IS_INSTANCE(tpe) =>
- stack.pop
- stack.push(BOOL)
-
- case CHECK_CAST(tpe) =>
- stack.pop
- stack.push(tpe)
-
- case SWITCH(tags, labels) =>
- stack.pop
-
- case JUMP(whereto) =>
- ()
-
- case CJUMP(success, failure, cond, kind) =>
- stack.pop2
-
- case CZJUMP(success, failure, cond, kind) =>
- stack.pop
-
- case RETURN(kind) =>
- if (kind != UNIT)
- stack.pop;
-
- case THROW() =>
- stack.pop
-
- case DROP(kind) =>
- stack.pop
-
- case DUP(kind) =>
- stack.push(stack.head)
-
- case MONITOR_ENTER() =>
- stack.pop
-
- case MONITOR_EXIT() =>
- stack.pop
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
- ()
-
- case LOAD_EXCEPTION(_) =>
- stack.pop(stack.length)
- stack.push(typeLattice.Object)
-
- case _ =>
- dumpClassesAndAbort("Unknown instruction: " + i)
- }
-
- new TransferFunction(consumed, gens)
- }
-
- for (b <- blocks) {
- flowFun = flowFun + (b -> transfer(b))
+ var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
+ var instrs = b.toList
+ while(!instrs.isEmpty) {
+ val i = instrs.head
+ result = mutatingInterpret(result, i)
+ instrs = instrs.tail
}
+ result
}
-*/
+
/** Abstract interpretation for one instruction. */
def interpret(in: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
+ mutatingInterpret(out, i)
+ }
+
+ def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
val bindings = out.vars
val stack = out.stack
if (settings.debug.value) {
-// Console.println("[before] Stack: " + stack);
-// Console.println(i);
+ // Console.println("[before] Stack: " + stack);
+ // Console.println(i);
}
i match {
@@ -619,11 +410,292 @@ abstract class TypeFlowAnalysis {
}
}
+ case class CallsiteInfo(bb: icodes.BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol)
+
+ /**
+
+ A full type-flow analysis on a method computes in- and out-flows for each basic block (that's what MethodTFA does).
+
+ For the purposes of Inliner, doing so guarantees that an abstract typestack-slot is available by the time an inlining candidate (a CALL_METHOD instruction) is visited.
+ This subclass (MTFAGrowable) of MethodTFA also aims at performing such analysis on CALL_METHOD instructions, with some differences:
+
+ (a) early screening is performed while the type-flow is being computed (in an override of `blockTransfer`) by testing a subset of the conditions that Inliner checks later.
+ The reasoning here is: if the early check fails at some iteration, there's no chance a follow-up iteration (with a yet more lub-ed typestack-slot) will succeed.
+ Failure is sufficient to remove that particular CALL_METHOD from the typeflow's `remainingCALLs`.
+ A forward note: in case inlining occurs at some basic block B, all blocks reachable from B get their CALL_METHOD instructions considered again as candidates
+ (because of the more precise types that -- perhaps -- can be computed).
+
+ (b) in case the early check does not fail, no conclusive decision can be made, thus the CALL_METHOD stays in `isOnWatchlist`.
+
+ In other words, `remainingCALLs` tracks those callsites that still remain as candidates for inlining, so that Inliner can focus on those.
+ `remainingCALLs` also caches info about the typestack just before the callsite, so as to spare recomputing it at inlining time.
+
+ Besides caching, a further optimization involves skipping those basic blocks whose in-flow and out-flow aren't needed anyway (as explained next).
+ A basic block lacking a callsite in `remainingCALLs`, when visited by the standard algorithm, won't cause any inlining.
+ But as we know from the way type-flows are computed, computing the in- and out-flow for a basic block relies in general on those of other basic blocks.
+ In detail, we want to focus on the sub-graph of the CFG through which control flow may reach a remaining candidate callsite.
+ Those basic blocks not in that subgraph can be skipped altogether. That's why:
+ - `forwardAnalysis()` in `MTFAGrowable` now checks for inclusion of a basic block in `relevantBBs`
+ - same check is performed before adding a block to the worklist, and as part of choosing successors.
+ The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overriding most methods of the dataflow analysis.
+
+ The rest of the story takes place in Inliner, which does not visit all of the method's basic blocks but only those represented in `remainingCALLs`.
+
+ @author Miguel Garcia, http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+
+ */
class MTFAGrowable extends MethodTFA {
import icodes._
- /** discards what must be discarded, blanks what needs to be blanked out, and keeps the rest. */
+ val remainingCALLs = mutable.Map.empty[opcodes.CALL_METHOD, CallsiteInfo]
+
+ val preCandidates = mutable.Set.empty[BasicBlock]
+
+ var callerLin: Traversable[BasicBlock] = null
+
+ override def run {
+
+ timer.start
+ forwardAnalysis(blockTransfer)
+ val t = timer.stop
+
+ /* Now that `forwardAnalysis(blockTransfer)` has finished, all inlining candidates can be found in `remainingCALLs`,
+ whose keys are callsites and whose values are pieces of information about the typestack just before the callsite in question.
+ In order to keep `analyzeMethod()` simple, we collect in `preCandidates` those basic blocks containing at least one candidate. */
+ preCandidates.clear()
+ for(rc <- remainingCALLs) {
+ preCandidates += rc._2.bb
+ }
+
+ if (settings.debug.value) {
+ for(b <- callerLin; if (b != method.startBlock) && preCandidates(b)) {
+ assert(visited.contains(b),
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited)
+ }
+ }
+
+ }
+
+ var shrinkedWatchlist = false
+
+ /*
+ This is the method where information cached elsewhere is put to use. References are given to the other places that populate those caches.
+
+ The goal is to avoid computing type-flows for blocks we don't need (i.e., blocks not tracked in `relevantBBs`). The method used to add to `relevantBBs` is `putOnRadar`.
+
+ Moreover, it's often the case that the last CALL_METHOD of interest ("of interest" equates to "being tracked in `isOnWatchlist`") isn't the last instruction on the block.
+ There are cases where the typeflows computed past this `lastInstruction` are needed, and cases where they aren't.
+ The reasoning behind this decision is described in `populatePerimeter()`. All `blockTransfer()` needs to do (in order to know at which instruction it can stop)
+ is to query `isOnPerimeter`.
+
+ Upon visiting a CALL_METHOD that's an inlining candidate, the relevant pieces of information about the pre-instruction typestack are collected for future use.
+ That is, unless the candidacy test fails. The reasoning here is: if such an early check fails at some iteration, there's no chance a follow-up iteration
+ (with a yet more lub-ed typestack-slot) will succeed. In case of failure we can safely remove the CALL_METHOD from both `isOnWatchlist` and `remainingCALLs`.
+
+ */
+ override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
+ var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
+
+ val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null;
+ var isPastLast = false
+
+ var instrs = b.toList
+ while(!isPastLast && !instrs.isEmpty) {
+ val i = instrs.head
+
+ if(isOnWatchlist(i)) {
+ val cm = i.asInstanceOf[opcodes.CALL_METHOD]
+ val msym = cm.method
+ val paramsLength = msym.info.paramTypes.size
+ val receiver = result.stack.types.drop(paramsLength).head match {
+ case REFERENCE(s) => s
+ case _ => NoSymbol // e.g. the scrutinee is BOX(s) or ARRAY
+ }
+ val concreteMethod = inliner.lookupImplFor(msym, receiver)
+ val isCandidate = {
+ ( inliner.isClosureClass(receiver) || concreteMethod.isEffectivelyFinal || receiver.isEffectivelyFinal ) &&
+ !blackballed(concreteMethod)
+ }
+ if(isCandidate) {
+ remainingCALLs += Pair(cm, CallsiteInfo(b, receiver, result.stack.length, concreteMethod))
+ } else {
+ remainingCALLs.remove(cm)
+ isOnWatchlist.remove(cm)
+ shrinkedWatchlist = true
+ }
+ }
+
+ isPastLast = (i eq stopAt)
+
+ if(!isPastLast) {
+ result = mutatingInterpret(result, i)
+ instrs = instrs.tail
+ }
+ }
+
+ result
+ } // end of method blockTransfer
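+
+ /* For intuition on the receiver lookup above: `TypeStack.types` lists slots with the top of
+    the stack first, so just before a callsite `recv.f(a, b)` the list starts with the argument
+    types followed by the receiver's type. Dropping `paramsLength` slots (2 in this hypothetical
+    example) therefore leaves the receiver's type at the head, which is a REFERENCE(s) unless
+    the receiver slot holds a boxed value or an array. */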
+
+ val isOnWatchlist = mutable.Set.empty[Instruction]
+
+ /* Each time CallerCalleeInfo.isSafeToInline determines a concrete callee is unsafe to inline in the current caller,
+ the fact is recorded in this TFA instance for the purpose of avoiding devoting processing to that callsite next time.
+ The condition of "being unsafe to inline in the current caller" sticks across inlinings and TFA re-inits
+ because it depends on the instructions of the callee, which stay unchanged during the course of `analyzeInc(caller)`
+ (with the caveat of the side-effecting `makePublic` in `helperIsSafeToInline`).*/
+ val knownUnsafe = mutable.Set.empty[Symbol]
+ val knownSafe = mutable.Set.empty[Symbol]
+ val knownNever = mutable.Set.empty[Symbol] // `knownNever` needs to be cleared only at the very end of the inlining phase (unlike `knownUnsafe` and `knownSafe`)
+ @inline final def blackballed(msym: Symbol): Boolean = { knownUnsafe(msym) || knownNever(msym) }
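+
+ /* In other words, a memoized negative cache: once a callee symbol is found unsafe,
+    `blackballed(msym)` short-circuits any further consideration of its callsites. Note that
+    `init()` below clears `knownUnsafe` and `knownSafe` but, per the comment above, not
+    `knownNever`, which survives per-caller re-inits. */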
+
+ val relevantBBs = mutable.Set.empty[BasicBlock]
+
+ private def isPreCandidate(cm: opcodes.CALL_METHOD): Boolean = {
+ val msym = cm.method
+ val style = cm.style
+ // Dynamic == normal invocations
+ // Static(true) == calls to private members
+ !msym.isConstructor && !blackballed(msym) &&
+ (style.isDynamic || (style.hasInstance && style.isStatic))
+ // && !(msym hasAnnotation definitions.ScalaNoInlineClass)
+ }
+
+ override def init(m: icodes.IMethod) {
+ super.init(m)
+ remainingCALLs.clear()
+ knownUnsafe.clear()
+ knownSafe.clear()
+ // initially populate the watchlist with all callsites standing a chance of being inlined
+ isOnWatchlist.clear()
+ relevantBBs.clear()
+ /* TODO Do we want to perform inlining in non-finally exception handlers?
+ * Seems counterproductive (the larger the method, the less likely it will be JITed).
+ * Putting on radar only `linearizer linearizeAt (m, m.startBlock)` doesn't make for much shorter inlining times (a minor speedup nonetheless),
+ * but the effect on method size could be explored. */
+ putOnRadar(m.linearizedBlocks(linearizer))
+ populatePerimeter()
+ assert(relevantBBs.isEmpty || relevantBBs.contains(m.startBlock), "you gave me dead code")
+ }
+
+ def conclusives(b: BasicBlock): List[opcodes.CALL_METHOD] = {
+ knownBeforehand(b) filter { cm => inliner.isMonadicMethod(cm.method) || inliner.hasInline(cm.method) }
+ }
+
+ def knownBeforehand(b: BasicBlock): List[opcodes.CALL_METHOD] = {
+ b.toList collect { case c : opcodes.CALL_METHOD => c } filter { cm => isPreCandidate(cm) && isReceiverKnown(cm) }
+ }
+
+ private def isReceiverKnown(cm: opcodes.CALL_METHOD): Boolean = {
+ cm.method.isEffectivelyFinal && cm.method.owner.isEffectivelyFinal
+ }
+
+ private def putOnRadar(blocks: Traversable[BasicBlock]) {
+ for(bb <- blocks) {
+ val preCands = bb.toList collect {
+ case cm : opcodes.CALL_METHOD
+ if isPreCandidate(cm) /* && !isReceiverKnown(cm) */
+ => cm
+ }
+ isOnWatchlist ++= preCands
+ }
+ relevantBBs ++= blocks
+ }
+
+ /* the argument is also included in the result */
+ private def transitivePreds(b: BasicBlock): Set[BasicBlock] = { transitivePreds(List(b)) }
+
+ /* those BBs in the argument are also included in the result */
+ private def transitivePreds(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
+ val result = mutable.Set.empty[BasicBlock]
+ var toVisit: List[BasicBlock] = starters.toList.distinct
+ while(toVisit.nonEmpty) {
+ val h = toVisit.head
+ toVisit = toVisit.tail
+ result += h
+ for(p <- h.predecessors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
+ }
+ result.toSet
+ }
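+
+ /* e.g. for a diamond CFG  entry -> {thn, els} -> join,  `transitivePreds(List(join))`
+    returns all four blocks: every block from which `join` is reachable, plus `join` itself. */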
+
+ /* those BBs in the argument are also included in the result */
+ private def transitiveSuccs(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
+ val result = mutable.Set.empty[BasicBlock]
+ var toVisit: List[BasicBlock] = starters.toList.distinct
+ while(toVisit.nonEmpty) {
+ val h = toVisit.head
+ toVisit = toVisit.tail
+ result += h
+ for(p <- h.successors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
+ }
+ result.toSet
+ }
+
+ /* A basic block B is "on the perimeter" of the current control-flow subgraph if none of its successors belongs to that subgraph.
+ * In that case, for the purposes of inlining, we're interested in the typestack right before the last inline candidate in B, not in those that follow.
+ * In particular we can do without computing the outflow at B (a worked example follows the helper definitions below). */
+ private def populatePerimeter() {
+ isOnPerimeter.clear()
+ var done = true
+ do {
+ val (frontier, toPrune) = (relevantBBs filter hasNoRelevantSuccs) partition isWatching
+ isOnPerimeter ++= frontier
+ relevantBBs --= toPrune
+ done = toPrune.isEmpty
+ } while(!done)
+
+ lastInstruction.clear()
+ for(b <- isOnPerimeter; val lastIns = b.toList.reverse find isOnWatchlist) {
+ lastInstruction += (b -> lastIns.get.asInstanceOf[opcodes.CALL_METHOD])
+ }
+
+ // assertion: "no relevant block can have a predecessor that is on perimeter"
+ assert((for (b <- relevantBBs; if transitivePreds(b.predecessors) exists isOnPerimeter) yield b).isEmpty)
+ }
+
+ private val isOnPerimeter = mutable.Set.empty[BasicBlock]
+ private val lastInstruction = mutable.Map.empty[BasicBlock, opcodes.CALL_METHOD]
+
+ def hasNoRelevantSuccs(x: BasicBlock): Boolean = { !(x.successors exists relevantBBs) }
+
+ def isWatching(x: BasicBlock): Boolean = (x.toList exists isOnWatchlist)
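+
+ /* Worked example (hypothetical CFG): suppose relevantBBs = {B1, B2} with B1 -> B2 and B2
+    having no relevant successors. If B2 contains a watched CALL_METHOD it joins the perimeter
+    (its typeflows are computed only up to its `lastInstruction`); otherwise B2 is pruned from
+    relevantBBs, and the loop iterates because B1 may now qualify too. */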
+
+
+
+
+ /**
+
+ This method is invoked after one or more inlinings have been performed in basic blocks whose in-flow is non-bottom (this makes a difference later).
+ What we know about those inlinings is given by:
+
+ - `staleOut`: These are the blocks where a callsite was inlined.
+ For each callsite, all instructions in that block before the callsite were left in the block, and the rest moved to an `afterBlock`.
+ The out-flow of these basic blocks is thus in general stale; that's why we'll add them to the TFA worklist.
+
+ - `inlined` : These blocks were spliced into the method's CFG as part of inlining. Being new blocks, they haven't been visited yet by the typeflow analysis.
+
+ - `staleIn` : These blocks are what `doInline()` calls `afterBlock`s, i.e. the new home for instructions that previously appeared
+ after a callsite in a `staleOut` block (a tiny example follows `reinit()`).
+
+ Based on the above information, we have to bring up-to-date the caches that `forwardAnalysis` and `blockTransfer` use to skip blocks and instructions.
+ Those caches are `relevantBBs` and `isOnPerimeter` (for blocks) and `isOnWatchlist` and `lastInstruction` (for CALL_METHODs).
+ Please notice that all `inlined` and `staleIn` blocks are reachable from `staleOut` blocks.
+
+ The update takes place in two steps:
+
+ (1) `staleOut foreach { so => putOnRadar(linearizer linearizeAt (m, so)) }`
+ This results in initial populations for `relevantBBs` and `isOnWatchlist`.
+ Because of the way `isPreCandidate` reuses previous decision-outcomes that are still valid,
+ this already prunes some candidates standing no chance of being inlined.
+
+ (2) `populatePerimeter()`
+ Based on the CFG-subgraph determined in (1) as reflected in `relevantBBs`,
+ this method detects some blocks whose typeflows aren't needed past a certain CALL_METHOD
+ (not needed because none of its successors is relevant for the purposes of inlining, see `hasNoRelevantSuccs`).
+ The blocks thus chosen are said to be "on the perimeter" of the CFG-subgraph.
+ For each of them, its `lastInstruction` (after which no more typeflows are needed) is found.
+
+ */
def reinit(m: icodes.IMethod, staleOut: List[BasicBlock], inlined: collection.Set[BasicBlock], staleIn: collection.Set[BasicBlock]) {
if (this.method == null || this.method.symbol != m.symbol) {
init(m)
@@ -633,31 +705,102 @@ abstract class TypeFlowAnalysis {
return;
}
- reinit {
- // asserts conveying an idea what CFG shapes arrive here.
- // staleIn foreach (p => assert( !in.isDefinedAt(p), p))
- // staleIn foreach (p => assert(!out.isDefinedAt(p), p))
- // inlined foreach (p => assert( !in.isDefinedAt(p), p))
- // inlined foreach (p => assert(!out.isDefinedAt(p), p))
- // inlined foreach (p => assert(!p.successors.isEmpty || p.lastInstruction.isInstanceOf[icodes.opcodes.THROW], p))
- // staleOut foreach (p => assert( in.isDefinedAt(p), p))
-
- // never rewrite in(m.startBlock)
- staleOut foreach { b =>
- if(!inlined.contains(b)) { worklist += b }
- out(b) = typeFlowLattice.bottom
- }
- // nothing else is added to the worklist, bb's reachable via succs will be tfa'ed
- blankOut(inlined)
- blankOut(staleIn)
- // no need to add startBlocks from m.exh
+ worklist.clear // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
+
+ // asserts conveying an idea of what CFG shapes arrive here:
+ // staleIn foreach (p => assert( !in.isDefinedAt(p), p))
+ // staleIn foreach (p => assert(!out.isDefinedAt(p), p))
+ // inlined foreach (p => assert( !in.isDefinedAt(p), p))
+ // inlined foreach (p => assert(!out.isDefinedAt(p), p))
+ // inlined foreach (p => assert(!p.successors.isEmpty || p.lastInstruction.isInstanceOf[icodes.opcodes.THROW], p))
+ // staleOut foreach (p => assert( in.isDefinedAt(p), p))
+
+ // remainingCALLs.clear()
+ isOnWatchlist.clear()
+ relevantBBs.clear()
+
+ // never rewrite in(m.startBlock)
+ staleOut foreach { b =>
+ enqueue(b)
+ out(b) = typeFlowLattice.bottom
}
+ // nothing else is added to the worklist, bb's reachable via succs will be tfa'ed
+ blankOut(inlined)
+ blankOut(staleIn)
+ // no need to add startBlocks from m.exh
+
+ staleOut foreach { so => putOnRadar(linearizer linearizeAt (m, so)) }
+ populatePerimeter()
+
+ } // end of method reinit
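+
+ /* Tiny example of the inputs above: inlining a callsite `cs` that sat in block B leaves
+      B, with the instructions up to `cs`                       (a `staleOut` block)
+      the callee's spliced-in blocks                            (the `inlined` blocks)
+      A, with the instructions that used to follow `cs` in B    (a `staleIn` afterBlock)
+    so B's out-flow is stale, while the inlined blocks and A have never been visited. */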
+
+ /* this is not a general purpose method to add to the worklist,
+ * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
+ private def enqueue(b: BasicBlock) {
+ assert(in(b) ne typeFlowLattice.bottom)
+ if(!worklist.contains(b)) { worklist += b }
+ }
+
+ /* this is not a general purpose method to add to the worklist,
+ * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
+ private def enqueue(bs: Traversable[BasicBlock]) {
+ bs foreach enqueue
}
private def blankOut(blocks: collection.Set[BasicBlock]) {
blocks foreach { b =>
- in(b) = typeFlowLattice.bottom
- out(b) = typeFlowLattice.bottom
+ in(b) = typeFlowLattice.bottom
+ out(b) = typeFlowLattice.bottom
+ }
+ }
+
+ /*
+ This is basically the plain-old forward-analysis part of a dataflow algorithm,
+ adapted to skip non-relevant blocks (as determined by `reinit()` via `populatePerimeter()`).
+
+ The adaptations are:
+
+ - only relevant blocks dequeued from the worklist move on to have the transfer function applied
+
+ - `visited` now means the transfer function was applied to the block,
+ but notice that this no longer implies its out-flow differs from bottom,
+ because a block on the perimeter will have per-instruction typeflows computed only up to its `lastInstruction`.
+ In case you need to know whether a visited block `v` has been "fully visited", evaluate `out(v) ne typeFlowLattice.bottom`.
+
+ - given that the transfer function may remove callsite-candidates from the watchlist (thus, they are not candidates anymore),
+ there's an opportunity to detect whether a previously relevant block has been left without candidates.
+ That's what `shrinkedWatchlist` detects. Provided the block was on the perimeter, we know we can skip it from now on,
+ and we can also constrain the CFG-subgraph by finding a new perimeter (thus the invocation to `populatePerimeter()`).
+ For reference, a generic sketch of the unadapted worklist scheme follows the method.
+ */
+ override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = {
+ while (!worklist.isEmpty && relevantBBs.nonEmpty) {
+ if (stat) iterations += 1
+ val point = worklist.iterator.next; worklist -= point;
+ if(relevantBBs(point)) {
+ shrinkedWatchlist = false
+ val output = f(point, in(point))
+ visited += point;
+ if(isOnPerimeter(point)) {
+ if(shrinkedWatchlist && !isWatching(point)) {
+ relevantBBs -= point;
+ populatePerimeter()
+ }
+ } else {
+ val propagate = ((lattice.bottom == out(point)) || output != out(point))
+ if (propagate) {
+ out(point) = output
+ val succs = point.successors filter relevantBBs
+ succs foreach { p =>
+ assert((p.predecessors filter isOnPerimeter).isEmpty)
+ val updated = lattice.lub(List(output, in(p)), p.exceptionHandlerStart)
+ if(updated != in(p)) {
+ in(p) = updated
+ enqueue(p)
+ }
+ }
+ }
+ }
+ }
}
}
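+
+ /* The unadapted scheme is the classic Kildall worklist algorithm; a generic sketch
+    (hypothetical signatures, not this class's lattice types):
+
+      def kildall[N, E](seed: List[N], succs: N => List[N],
+                        transfer: (N, E) => E, lub: (E, E) => E,
+                        in: mutable.Map[N, E]) {
+        val worklist = mutable.Queue(seed: _*)
+        while (worklist.nonEmpty) {
+          val n   = worklist.dequeue()
+          val out = transfer(n, in(n))
+          for (s <- succs(n); updated = lub(out, in(s)); if updated != in(s)) {
+            in(s) = updated; worklist += s // re-schedule successors whose in-flow grew
+          }
+        }
+      }
+
+    The override above additionally filters both dequeued blocks and their successors
+    through `relevantBBs`, and stops per-block interpretation at the perimeter. */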
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 865bacffaa..c217869a48 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -23,9 +23,7 @@ trait BytecodeWriters {
import global._
private def outputDirectory(sym: Symbol): AbstractFile = (
- settings.outputDirs.outputDirFor {
- atPhase(currentRun.flattenPhase.prev)(sym.sourceFile)
- }
+ settings.outputDirs.outputDirFor(beforeFlatten(sym.sourceFile))
)
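+  // Judging from the 1:1 replacements in this patch, `beforeFlatten(op)` stands in for
+  // atPhase(currentRun.flattenPhase.prev)(op); the beforeErasure/afterErasure/afterPickler
+  // helpers used below likewise replace explicit atPhase(...) calls at the corresponding
+  // phase boundaries.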
private def getFile(base: AbstractFile, cls: JClass, suffix: String): AbstractFile = {
var dir = base
@@ -85,7 +83,7 @@ trait BytecodeWriters {
emitJavap(bytes, javapFile)
}
}
-
+
trait ClassBytecodeWriter extends BytecodeWriter {
def writeClass(label: String, jclass: JClass, sym: Symbol) {
val outfile = getFile(sym, jclass, ".class")
@@ -96,18 +94,18 @@ trait BytecodeWriters {
informProgress("wrote '" + label + "' to " + outfile)
}
}
-
+
trait DumpBytecodeWriter extends BytecodeWriter {
val baseDir = Directory(settings.Ydumpclasses.value).createDirectory()
-
+
abstract override def writeClass(label: String, jclass: JClass, sym: Symbol) {
super.writeClass(label, jclass, sym)
-
+
val pathName = jclass.getName()
var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
dumpFile.parent.createDirectory()
val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path))
-
+
try jclass writeTo outstream
finally outstream.close()
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index b5232fff09..c609f126d3 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -37,21 +37,19 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
/** Create a new phase */
override def newPhase(p: Phase): Phase = new JvmPhase(p)
- private def outputDirectory(sym: Symbol): AbstractFile = (
- settings.outputDirs.outputDirFor {
- atPhase(currentRun.flattenPhase.prev)(sym.sourceFile)
- }
- )
- private def getFile(base: AbstractFile, cls: JClass, suffix: String): AbstractFile = {
+ private def outputDirectory(sym: Symbol): AbstractFile =
+ settings.outputDirs outputDirFor beforeFlatten(sym.sourceFile)
+
+ private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
var dir = base
- val pathParts = cls.getName().split("[./]").toList
+ val pathParts = clsName.split("[./]").toList
for (part <- pathParts.init) {
dir = dir.subdirectoryNamed(part)
}
dir.fileNamed(pathParts.last + suffix)
}
- private def getFile(sym: Symbol, cls: JClass, suffix: String): AbstractFile =
- getFile(outputDirectory(sym), cls, suffix)
+ private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
+ getFile(outputDirectory(sym), clsName, suffix)
/** JVM code generation phase
*/
@@ -87,7 +85,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// succeed or warn that it isn't.
hasApproximate && {
// Before erasure so we can identify generic mains.
- atPhase(currentRun.erasurePhase) {
+ beforeErasure {
val companion = sym.linkedClassOfClass
val companionMain = companion.tpe.member(nme.main)
@@ -154,14 +152,14 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (settings.Ygenjavap.isDefault) {
if(settings.Ydumpclasses.isDefault)
new ClassBytecodeWriter { }
- else
+ else
new ClassBytecodeWriter with DumpBytecodeWriter { }
}
else new ClassBytecodeWriter with JavapBytecodeWriter { }
}
val codeGenerator = new BytecodeGenerator(bytecodeWriter)
- log("Created new bytecode generator for " + classes.size + " classes.")
+ debuglog("Created new bytecode generator for " + classes.size + " classes.")
sortedClasses foreach { c =>
try codeGenerator.genClass(c)
@@ -197,8 +195,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val StringBuilderClassName = javaName(definitions.StringBuilderClass)
val BoxesRunTime = "scala.runtime.BoxesRunTime"
- val StringBuilderType = new JObjectType(StringBuilderClassName)
- val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY)
+ val StringBuilderType = new JObjectType(StringBuilderClassName) // TODO use ASMType.getObjectType
+ val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY) // TODO use ASMType.getMethodType
val arrayCloneType = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY)
val MethodTypeType = new JObjectType("java.dyn.MethodType")
val JavaLangClassType = new JObjectType("java.lang.Class")
@@ -209,17 +207,18 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val BeanInfoSkipAttr = definitions.getRequiredClass("scala.beans.BeanInfoSkip")
val BeanDisplayNameAttr = definitions.getRequiredClass("scala.beans.BeanDisplayName")
val BeanDescriptionAttr = definitions.getRequiredClass("scala.beans.BeanDescription")
-
+
final val ExcludedForwarderFlags = {
import Flags._
- ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | BridgeAndPrivateFlags )
+ // Should include DEFERRED but this breaks findMember.
+ ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags )
}
// Additional interface parents based on annotations and other cues
- def newParentForAttr(attr: Symbol): Option[Type] = attr match {
- case SerializableAttr => Some(SerializableClass.tpe)
- case CloneableAttr => Some(JavaCloneableClass.tpe)
- case RemoteAttr => Some(RemoteInterfaceClass.tpe)
+ def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
+ case SerializableAttr => Some(SerializableClass)
+ case CloneableAttr => Some(JavaCloneableClass)
+ case RemoteAttr => Some(RemoteInterfaceClass)
case _ => None
}
@@ -232,11 +231,47 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
vp
}
+ private def helperBoxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
+ val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
+ val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
+
+ Pair("boxTo" + boxedType.decodedName, mtype)
+ }
+
+ private val jBoxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
+ BOOL -> helperBoxTo(BOOL) ,
+ BYTE -> helperBoxTo(BYTE) ,
+ CHAR -> helperBoxTo(CHAR) ,
+ SHORT -> helperBoxTo(SHORT) ,
+ INT -> helperBoxTo(INT) ,
+ LONG -> helperBoxTo(LONG) ,
+ FLOAT -> helperBoxTo(FLOAT) ,
+ DOUBLE -> helperBoxTo(DOUBLE)
+ )
+
+ private def helperUnboxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
+ val mtype = new JMethodType(javaType(kind), Array(JAVA_LANG_OBJECT))
+ val mname = "unboxTo" + kind.toType.typeSymbol.decodedName
+
+ Pair(mname, mtype)
+ }
+
+ private val jUnboxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
+ BOOL -> helperUnboxTo(BOOL) ,
+ BYTE -> helperUnboxTo(BYTE) ,
+ CHAR -> helperUnboxTo(CHAR) ,
+ SHORT -> helperUnboxTo(SHORT) ,
+ INT -> helperUnboxTo(INT) ,
+ LONG -> helperUnboxTo(LONG) ,
+ FLOAT -> helperUnboxTo(FLOAT) ,
+ DOUBLE -> helperUnboxTo(DOUBLE)
+ )
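+
+  /* These tables precompute the BoxesRunTime method name and descriptor once per TypeKind,
+     so the BOX/UNBOX cases in genBlock below no longer rebuild a JMethodType at every
+     instruction; e.g. jBoxTo(LONG) pairs "boxToLong" with a (J)Ljava/lang/Long; method type. */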
+
var clasz: IClass = _
var method: IMethod = _
var jclass: JClass = _
var jmethod: JMethod = _
-// var jcode: JExtendedCode = _
+ // var jcode: JExtendedCode = _
def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
def isRemoteClass = clasz.symbol hasAnnotation RemoteAttr
@@ -264,7 +299,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
private def innerClassSymbolFor(s: Symbol): Symbol =
if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
- override def javaName(sym: Symbol): String = {
+ override def javaName(sym: Symbol): String = { // TODO Miguel says: check whether a single pass over `icodes.classes` can populate `innerClassBuffer` faster.
/**
* Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer
*
@@ -272,13 +307,16 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
* of inner class all until root class.
*/
def collectInnerClass(s: Symbol): Unit = {
- // TODO: something atPhase(currentRun.flattenPhase.prev) which accounts for
+ // TODO: some beforeFlatten { ... } which accounts for
// being nested in parameterized classes (if we're going to selectively flatten.)
val x = innerClassSymbolFor(s)
- val isInner = x.isClass && !x.rawowner.isPackageClass
- if (isInner) {
- innerClassBuffer += x
- collectInnerClass(x.rawowner)
+ if(x ne NoSymbol) {
+ assert(x.isClass, "not an inner-class symbol")
+ val isInner = !x.rawowner.isPackageClass
+ if (isInner) {
+ innerClassBuffer += x
+ collectInnerClass(x.rawowner)
+ }
}
}
collectInnerClass(sym)
@@ -340,38 +378,44 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
private var innerClassBuffer = mutable.LinkedHashSet[Symbol]()
- /** Drop redundant interfaces (ones which are implemented by some
- * other parent) from the immediate parents. This is important on
- * android because there is otherwise an interface explosion.
+ /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
+ * This is important on Android because there is otherwise an interface explosion.
*/
- private def minimizeInterfaces(interfaces: List[Symbol]): List[Symbol] = (
- interfaces filterNot (int1 =>
- interfaces exists (int2 =>
- (int1 ne int2) && (int2 isSubClass int1)
- )
- )
- )
+ private def minimizeInterfaces(interfaces: List[Symbol]): List[Symbol] = {
+ var rest = interfaces
+ var leaves = List.empty[Symbol]
+ while(!rest.isEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
+ if(!nonLeaf) {
+ leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
+ }
+ rest = rest.tail
+ }
+
+ leaves
+ }
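+
+  /* Worked example: for interfaces = List(java.util.List, Collection, Iterable), the loop
+     keeps only List (Collection and Iterable are supertypes of the already-kept leaf),
+     matching the behaviour of the filterNot-based version this replaces, in a single
+     left-to-right pass. */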
def genClass(c: IClass) {
clasz = c
innerClassBuffer.clear()
val name = javaName(c.symbol)
- val superClass :: superInterfaces = {
- val parents0 = c.symbol.info.parents match {
- case Nil => List(ObjectClass.tpe)
- case ps => ps
- }
- parents0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol)) distinct
- }
- val ifaces = superInterfaces match {
- case Nil => JClass.NO_INTERFACES
- case _ => mkArray(minimizeInterfaces(superInterfaces map (_.typeSymbol)) map javaName)
- }
+
+ val ps = c.symbol.info.parents
+
+ val superClass: Symbol = if(ps.isEmpty) ObjectClass else ps.head.typeSymbol;
+
+ val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
+ val superInterfaces = superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol)) distinct
+
+ val ifaces =
+ if(superInterfaces.isEmpty) JClass.NO_INTERFACES
+ else mkArray(minimizeInterfaces(superInterfaces) map javaName)
jclass = fjbgContext.JClass(javaFlags(c.symbol),
name,
- javaName(superClass.typeSymbol),
+ javaName(superClass),
ifaces,
c.cunit.source.toString)
@@ -393,7 +437,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// it must be a top level class (name contains no $s)
def isCandidateForForwarders(sym: Symbol): Boolean =
- atPhase(currentRun.picklerPhase.next) {
+ afterPickler {
!(sym.name.toString contains '$') && sym.hasModuleFlag && !sym.isImplClass && !sym.isNestedClass
}
@@ -433,7 +477,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
private def addEnclosingMethodAttribute(jclass: JClass, clazz: Symbol) {
val sym = clazz.originalEnclosingMethod
if (sym.isMethod) {
- log("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
+ debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
jclass,
javaName(sym.enclClass),
@@ -449,7 +493,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
enclClass, clazz)
)
else {
- log("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
+ debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
jclass,
javaName(enclClass),
@@ -681,7 +725,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
)
def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) {
if (needsGenericSignature(sym)) {
- val memberTpe = atPhase(currentRun.erasurePhase)(owner.thisType.memberInfo(sym))
+ val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
// println("addGenericSignature sym: " + sym.fullName + " : " + memberTpe + " sym.info: " + sym.info)
// println("addGenericSignature: "+ (sym.ownerChain map (x => (x.name, x.isImplClass))))
erasure.javaSig(sym, memberTpe) foreach { sig =>
@@ -700,7 +744,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
return
}
if ((settings.check.value contains "genjvm")) {
- val normalizedTpe = atPhase(currentRun.erasurePhase)(erasure.prepareSigMap(memberTpe))
+ val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
val bytecodeTpe = owner.thisType.memberInfo(sym)
if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym, normalizedTpe) =:= bytecodeTpe)) {
clasz.cunit.warning(sym.pos,
@@ -716,9 +760,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
val index = jmember.getConstantPool.addUtf8(sig).toShort
if (opt.verboseDebug)
- atPhase(currentRun.erasurePhase) {
- println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index)
- }
+ beforeErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index))
+
val buf = ByteBuffer.allocate(2)
buf putShort index
addAttribute(jmember, tpnme.SignatureATTR, buf)
@@ -793,14 +836,14 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
innerSym.rawname + innerSym.moduleSuffix
// add inner classes which might not have been referenced yet
- atPhase(currentRun.erasurePhase.next) {
+ afterErasure {
for (sym <- List(clasz.symbol, clasz.symbol.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
innerClassBuffer += m
}
val allInners = innerClassBuffer.toList
if (allInners.nonEmpty) {
- log(clasz.symbol.fullName('.') + " contains " + allInners.size + " inner classes.")
+ debuglog(clasz.symbol.fullName('.') + " contains " + allInners.size + " inner classes.")
val innerClassesAttr = jclass.getInnerClasses()
// sort them so inner classes succeed their enclosing class
// to satisfy the Eclipse Java compiler
@@ -826,6 +869,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
def genField(f: IField) {
debuglog("Adding field: " + f.symbol.fullName)
+
val jfield = jclass.addNewField(
javaFlags(f.symbol) | javaFieldFlags(f.symbol),
javaName(f.symbol),
@@ -1128,8 +1172,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
linearization = linearizer.linearize(m)
val labels = makeLabels(linearization)
- /** local variables whose scope appears in this block. */
- val varsInBlock: mutable.Set[Local] = new mutable.HashSet
var nextBlock: BasicBlock = linearization.head
@@ -1139,302 +1181,298 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
}
- /** Generate exception handlers for the current method. */
- def genExceptionHandlers() {
+ /** Generate exception handlers for the current method. */
+ def genExceptionHandlers() {
- /** Return a list of pairs of intervals where the handler is active.
- * The intervals in the list have to be inclusive in the beginning and
- * exclusive in the end: [start, end).
- */
- def ranges(e: ExceptionHandler): List[(Int, Int)] = {
- var covered = e.covered
- var ranges: List[(Int, Int)] = Nil
- var start = -1
- var end = -1
-
- linearization foreach { b =>
- if (! (covered contains b) ) {
- if (start >= 0) { // we're inside a handler range
- end = labels(b).getAnchor()
- ranges ::= ((start, end))
- start = -1
+ /** Return a list of pairs of intervals where the handler is active.
+ * The intervals in the list have to be inclusive in the beginning and
+ * exclusive in the end: [start, end).
+ */
+ def ranges(e: ExceptionHandler): List[(Int, Int)] = {
+ var covered = e.covered
+ var ranges: List[(Int, Int)] = Nil
+ var start = -1
+ var end = -1
+
+ linearization foreach { b =>
+ if (! (covered contains b) ) {
+ if (start >= 0) { // we're inside a handler range
+ end = labels(b).getAnchor()
+ ranges ::= ((start, end))
+ start = -1
+ }
+ } else {
+ if (start < 0) // we're not inside a handler range
+ start = labels(b).getAnchor()
+
+ end = endPC(b)
+ covered -= b
}
- } else {
- if (start < 0) // we're not inside a handler range
- start = labels(b).getAnchor()
+ }
- end = endPC(b)
- covered -= b
+ /* Add the last interval. Note that since the intervals are
+ * open-ended to the right, we have to give a number past the actual
+ * code!
+ */
+ if (start >= 0) {
+ ranges ::= ((start, jcode.getPC()))
}
- }
- /* Add the last interval. Note that since the intervals are
- * open-ended to the right, we have to give a number past the actual
- * code!
- */
- if (start >= 0) {
- ranges ::= ((start, jcode.getPC()))
+ if (!covered.isEmpty)
+ debuglog("Some covered blocks were not found in method: " + method +
+ " covered: " + covered + " not in " + linearization)
+ ranges
}
- if (!covered.isEmpty)
- debuglog("Some covered blocks were not found in method: " + method +
- " covered: " + covered + " not in " + linearization)
- ranges
+ for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
+ if (p._1 < p._2) {
+ debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
+ " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
+ val cls = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
+ else javaName(e.cls)
+ jcode.addExceptionHandler(p._1, p._2,
+ labels(e.startBlock).getAnchor(),
+ cls)
+ } else
+ log("Empty exception range: " + p)
+ }
}
- for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
- if (p._1 < p._2) {
- debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
- val cls = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
- else javaName(e.cls)
- jcode.addExceptionHandler(p._1, p._2,
- labels(e.startBlock).getAnchor(),
- cls)
- } else
- log("Empty exception range: " + p)
+ def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
+ target.isPublic || target.isProtected && {
+ (site.enclClass isSubClass target.enclClass) ||
+ (site.enclosingPackage == target.privateWithin)
+ }
}
- }
- def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
- target.isPublic || target.isProtected && {
- (site.enclClass isSubClass target.enclClass) ||
- (site.enclosingPackage == target.privateWithin)
- }
- }
+ def genCallMethod(call: CALL_METHOD) {
+ val CALL_METHOD(method, style) = call
+ val siteSymbol = clasz.symbol
+ val hostSymbol = call.hostClass
+ val methodOwner = method.owner
+ // info calls so that types are up to date; erasure may add lateINTERFACE to traits
+ hostSymbol.info ; methodOwner.info
+
+ def isInterfaceCall(sym: Symbol) = (
+ sym.isInterface && methodOwner != ObjectClass
+ || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
+ )
+ // whether to reference the type of the receiver or
+ // the type of the method owner (if not an interface!)
+ val useMethodOwner = (
+ style != Dynamic
+ || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol)
+ || hostSymbol.isBottomClass
+ )
+ val receiver = if (useMethodOwner) methodOwner else hostSymbol
+ val jowner = javaName(receiver)
+ val jname = javaName(method)
+ val jtype = javaType(method).asInstanceOf[JMethodType]
- def genCallMethod(call: CALL_METHOD) {
- val CALL_METHOD(method, style) = call
- val siteSymbol = clasz.symbol
- val hostSymbol = call.hostClass
- val methodOwner = method.owner
- // info calls so that types are up to date; erasure may add lateINTERFACE to traits
- hostSymbol.info ; methodOwner.info
-
- def isInterfaceCall(sym: Symbol) = (
- sym.isInterface && methodOwner != ObjectClass
- || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
- )
- // whether to reference the type of the receiver or
- // the type of the method owner (if not an interface!)
- val useMethodOwner = (
- style != Dynamic
- || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol)
- || hostSymbol.isBottomClass
- )
- val receiver = if (useMethodOwner) methodOwner else hostSymbol
- val jowner = javaName(receiver)
- val jname = javaName(method)
- val jtype = javaType(method).asInstanceOf[JMethodType]
-
- def emit(invoke: String) {
- log("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
- invoke match {
- case "invokeinterface" => jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
- case "invokevirtual" => jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
- case "invokespecial" => jcode.emitINVOKESPECIAL(jowner, jname, jtype)
- case "invokestatic" => jcode.emitINVOKESTATIC(jowner, jname, jtype)
+ def debugMsg(invoke: String) {
+ debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
}
- }
- def initModule() {
- // we initialize the MODULE$ field immediately after the super ctor
- if (isStaticModule(siteSymbol) && !isModuleInitialized &&
- jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
- jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
- isModuleInitialized = true
- jcode.emitALOAD_0()
- jcode.emitPUTSTATIC(jclass.getName(),
- nme.MODULE_INSTANCE_FIELD.toString,
- jclass.getType())
+
+ def initModule() {
+ // we initialize the MODULE$ field immediately after the super ctor
+ if (isStaticModule(siteSymbol) && !isModuleInitialized &&
+ jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
+ jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
+ isModuleInitialized = true
+ jcode.emitALOAD_0()
+ jcode.emitPUTSTATIC(jclass.getName(),
+ nme.MODULE_INSTANCE_FIELD.toString,
+ jclass.getType())
+ }
}
- }
- style match {
- case Static(true) => emit("invokespecial")
- case Static(false) => emit("invokestatic")
- case Dynamic if isInterfaceCall(receiver) => emit("invokeinterface")
- case Dynamic => emit("invokevirtual")
- case SuperCall(_) => emit("invokespecial") ; initModule()
+ style match {
+ case Static(true) => jcode.emitINVOKESPECIAL (jowner, jname, jtype) ; debugMsg("invokespecial")
+ case Static(false) => jcode.emitINVOKESTATIC (jowner, jname, jtype) ; debugMsg("invokestatic")
+ case Dynamic if isInterfaceCall(receiver) => jcode.emitINVOKEINTERFACE(jowner, jname, jtype) ; debugMsg("invokeinterface")
+ case Dynamic => jcode.emitINVOKEVIRTUAL (jowner, jname, jtype) ; debugMsg("invokevirtual")
+ case SuperCall(_) =>
+ jcode.emitINVOKESPECIAL(jowner, jname, jtype)
+ initModule()
+ debugMsg("invokespecial")
+ }
}
- }
- def genBlock(b: BasicBlock) {
- labels(b).anchorToNext()
+ def genBlock(b: BasicBlock) {
+ labels(b).anchorToNext()
- debuglog("Generating code for block: " + b + " at pc: " + labels(b).getAnchor())
- var lastMappedPC = 0
- var lastLineNr = 0
- var crtPC = 0
- varsInBlock.clear()
+ debuglog("Generating code for block: " + b + " at pc: " + labels(b).getAnchor())
+ var lastMappedPC = 0
+ var lastLineNr = 0
+ var crtPC = 0
- for (instr <- b) {
+ /** local variables whose scope appears in this block. */
+ val varsInBlock: mutable.Set[Local] = new mutable.HashSet
+ val lastInstr = b.lastInstruction
- instr match {
- case THIS(clasz) =>
- jcode.emitALOAD_0()
+ for (instr <- b) {
- case CONSTANT(const) =>
- genConstant(jcode, const)
+ instr match {
+ case THIS(clasz) => jcode.emitALOAD_0()
- case LOAD_ARRAY_ITEM(kind) =>
- jcode.emitALOAD(javaType(kind))
+ case CONSTANT(const) => genConstant(jcode, const)
- case LOAD_LOCAL(local) =>
- jcode.emitLOAD(indexOf(local), javaType(local.kind))
+ case LOAD_ARRAY_ITEM(kind) =>
+ if(kind.isRefOrArrayType) { jcode.emitAALOAD() }
+ else {
+ (kind: @unchecked) match {
+ case UNIT => throw new IllegalArgumentException("invalid type for aload " + kind)
+ case BOOL | BYTE => jcode.emitBALOAD()
+ case SHORT => jcode.emitSALOAD()
+ case CHAR => jcode.emitCALOAD()
+ case INT => jcode.emitIALOAD()
+ case LONG => jcode.emitLALOAD()
+ case FLOAT => jcode.emitFALOAD()
+ case DOUBLE => jcode.emitDALOAD()
+ }
+ }
- case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner +
- " flags: " + Flags.flagsToString(field.owner.flags))
- if (isStatic)
- jcode.emitGETSTATIC(owner,
- javaName(field),
- javaType(field))
- else
- jcode.emitGETFIELD(owner,
- javaName(field),
- javaType(field))
-
- case LOAD_MODULE(module) =>
-// assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " +
- Flags.flagsToString(module.flags));
- if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
- jcode.emitALOAD_0()
- else
- jcode.emitGETSTATIC(javaName(module) /* + "$" */ ,
- nme.MODULE_INSTANCE_FIELD.toString,
- javaType(module))
-
- case STORE_ARRAY_ITEM(kind) =>
- jcode emitASTORE javaType(kind)
-
- case STORE_LOCAL(local) =>
- jcode.emitSTORE(indexOf(local), javaType(local.kind))
-
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- jcode.emitASTORE_0()
-
- case STORE_FIELD(field, isStatic) =>
- val owner = javaName(field.owner)
- if (isStatic)
- jcode.emitPUTSTATIC(owner,
- javaName(field),
- javaType(field))
- else
- jcode.emitPUTFIELD(owner,
- javaName(field),
- javaType(field))
-
- case CALL_PRIMITIVE(primitive) =>
- genPrimitive(primitive, instr.pos)
-
- /** Special handling to access native Array.clone() */
- case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
- val target: String = javaType(call.targetTypeKind).getSignature()
- jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType)
-
- case call @ CALL_METHOD(method, style) =>
- genCallMethod(call)
-
- case BOX(kind) =>
- val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
- val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
- jcode.emitINVOKESTATIC(BoxesRunTime, "boxTo" + boxedType.decodedName, mtype)
-
- case UNBOX(kind) =>
- val mtype = new JMethodType(javaType(kind), Array(JAVA_LANG_OBJECT))
- jcode.emitINVOKESTATIC(BoxesRunTime, "unboxTo" + kind.toType.typeSymbol.decodedName, mtype)
-
- case NEW(REFERENCE(cls)) =>
- val className = javaName(cls)
- jcode emitNEW className
-
- case CREATE_ARRAY(elem, 1) => elem match {
- case REFERENCE(_) | ARRAY(_) =>
- jcode emitANEWARRAY javaType(elem).asInstanceOf[JReferenceType]
- case _ =>
- jcode emitNEWARRAY javaType(elem)
- }
+ case LOAD_LOCAL(local) => jcode.emitLOAD(indexOf(local), javaType(local.kind))
+
+ case lf @ LOAD_FIELD(field, isStatic) =>
+ var owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner +
+ " flags: " + Flags.flagsToString(field.owner.flags))
+ val fieldJName = javaName(field)
+ val fieldJType = javaType(field)
+ if (isStatic) jcode.emitGETSTATIC(owner, fieldJName, fieldJType)
+ else jcode.emitGETFIELD( owner, fieldJName, fieldJType)
+
+ case LOAD_MODULE(module) =>
+ // assert(module.isModule, "Expected module: " + module)
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
+ if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
+ jcode.emitALOAD_0()
+ else
+ jcode.emitGETSTATIC(javaName(module) /* + "$" */ ,
+ nme.MODULE_INSTANCE_FIELD.toString,
+ javaType(module))
+
+ case STORE_ARRAY_ITEM(kind) =>
+ if(kind.isRefOrArrayType) { jcode.emitAASTORE() }
+ else {
+ (kind: @unchecked) match {
+ case UNIT => throw new IllegalArgumentException("invalid type for astore " + kind)
+ case BOOL | BYTE => jcode.emitBASTORE()
+ case SHORT => jcode.emitSASTORE()
+ case CHAR => jcode.emitCASTORE()
+ case INT => jcode.emitIASTORE()
+ case LONG => jcode.emitLASTORE()
+ case FLOAT => jcode.emitFASTORE()
+ case DOUBLE => jcode.emitDASTORE()
+ }
+ }
- case CREATE_ARRAY(elem, dims) =>
- jcode.emitMULTIANEWARRAY(javaType(ArrayN(elem, dims)).asInstanceOf[JReferenceType], dims)
+ case STORE_LOCAL(local) =>
+ jcode.emitSTORE(indexOf(local), javaType(local.kind))
- case IS_INSTANCE(tpe) =>
- tpe match {
- case REFERENCE(cls) =>
- jcode emitINSTANCEOF new JObjectType(javaName(cls))
- case ARRAY(elem) =>
- jcode emitINSTANCEOF new JArrayType(javaType(elem))
- case _ =>
- abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
+ case STORE_THIS(_) =>
+ // this only works for impl classes because the self parameter comes first
+ // in the method signature. If that changes, this code has to be revisited.
+ jcode.emitASTORE_0()
- case CHECK_CAST(tpe) =>
- tpe match {
- case REFERENCE(cls) =>
- // No need to checkcast for Objects
- if (cls != ObjectClass)
- jcode emitCHECKCAST new JObjectType(javaName(cls))
- case ARRAY(elem) =>
- jcode emitCHECKCAST new JArrayType(javaType(elem))
- case _ =>
- abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
+ case STORE_FIELD(field, isStatic) =>
+ val owner = javaName(field.owner)
+ val fieldJName = javaName(field)
+ val fieldJType = javaType(field)
+ if (isStatic) jcode.emitPUTSTATIC(owner, fieldJName, fieldJType)
+ else jcode.emitPUTFIELD( owner, fieldJName, fieldJType)
- case SWITCH(tags, branches) =>
- val tagArray = new Array[Array[Int]](tags.length)
- var caze = tags
- var i = 0
+ case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos)
- while (i < tagArray.length) {
- tagArray(i) = new Array[Int](caze.head.length)
- caze.head.copyToArray(tagArray(i), 0)
- i += 1
- caze = caze.tail
- }
- val branchArray = jcode.newLabels(tagArray.length)
- i = 0
- while (i < branchArray.length) {
- branchArray(i) = labels(branches(i))
- i += 1
- }
- debuglog("Emitting SWITCH:\ntags: " + tags + "\nbranches: " + branches)
- jcode.emitSWITCH(tagArray,
- branchArray,
- labels(branches.last),
- MIN_SWITCH_DENSITY)
- ()
-
- case JUMP(whereto) =>
- if (nextBlock != whereto)
- jcode.emitGOTO_maybe_W(labels(whereto), false) // default to short jumps
-
- case CJUMP(success, failure, cond, kind) =>
- kind match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
+ /** Special handling to access native Array.clone() */
+ case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
+ val target: String = javaType(call.targetTypeKind).getSignature()
+ jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType)
+
+ case call @ CALL_METHOD(method, style) => genCallMethod(call)
+
+ case BOX(kind) =>
+ val Pair(mname, mtype) = jBoxTo(kind)
+ jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
+
+ case UNBOX(kind) =>
+ val Pair(mname, mtype) = jUnboxTo(kind)
+ jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
+
+ case NEW(REFERENCE(cls)) =>
+ val className = javaName(cls)
+ jcode emitNEW className
+
+ case CREATE_ARRAY(elem, 1) =>
+ if(elem.isRefOrArrayType) { jcode emitANEWARRAY javaType(elem).asInstanceOf[JReferenceType] }
+ else { jcode emitNEWARRAY javaType(elem) }
+
+ case CREATE_ARRAY(elem, dims) =>
+ jcode.emitMULTIANEWARRAY(javaType(ArrayN(elem, dims)).asInstanceOf[JReferenceType], dims)
+
+ case IS_INSTANCE(tpe) =>
+ tpe match {
+ case REFERENCE(cls) => jcode emitINSTANCEOF new JObjectType(javaName(cls))
+ case ARRAY(elem) => jcode emitINSTANCEOF new JArrayType(javaType(elem))
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
+
+ case CHECK_CAST(tpe) =>
+ tpe match {
+ case REFERENCE(cls) => if (cls != ObjectClass) { jcode emitCHECKCAST new JObjectType(javaName(cls)) } // No need to checkcast for Objects
+ case ARRAY(elem) => jcode emitCHECKCAST new JArrayType(javaType(elem))
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
+
+ case SWITCH(tags, branches) =>
+ val tagArray = new Array[Array[Int]](tags.length)
+ var caze = tags
+ var i = 0
+
+ while (i < tagArray.length) {
+ tagArray(i) = new Array[Int](caze.head.length)
+ caze.head.copyToArray(tagArray(i), 0)
+ i += 1
+ caze = caze.tail
+ }
+ val branchArray = jcode.newLabels(tagArray.length)
+ i = 0
+ while (i < branchArray.length) {
+ branchArray(i) = labels(branches(i))
+ i += 1
+ }
+ debuglog("Emitting SWITCH:\ntags: " + tags + "\nbranches: " + branches)
+ jcode.emitSWITCH(tagArray,
+ branchArray,
+ labels(branches.last),
+ MIN_SWITCH_DENSITY)
+ ()
+
+ case JUMP(whereto) =>
+ if (nextBlock != whereto)
+ jcode.emitGOTO_maybe_W(labels(whereto), false) // default to short jumps
+
+ case CJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF_ICMP(conds(negate(cond)), labels(failure))
+ jcode.emitIF_ICMP(conds(cond.negate()), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ICMP(conds(cond), labels(success))
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
-
- case REFERENCE(_) | ARRAY(_) =>
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
if (nextBlock == success) {
- jcode.emitIF_ACMP(conds(negate(cond)), labels(failure))
+ jcode.emitIF_ACMP(conds(cond.negate()), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ACMP(conds(cond), labels(success))
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
-
- case _ =>
+ } else {
(kind: @unchecked) match {
case LONG => jcode.emitLCMP()
case FLOAT =>
@@ -1445,38 +1483,32 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
else jcode.emitDCMPL()
}
if (nextBlock == success) {
- jcode.emitIF(conds(negate(cond)), labels(failure))
+ jcode.emitIF(conds(cond.negate()), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF(conds(cond), labels(success));
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
- }
+ }
- case CZJUMP(success, failure, cond, kind) =>
- kind match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
+ case CZJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF(conds(negate(cond)), labels(failure))
+ jcode.emitIF(conds(cond.negate()), labels(failure))
} else {
jcode.emitIF(conds(cond), labels(success))
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
-
- case REFERENCE(_) | ARRAY(_) =>
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
val Success = success
val Failure = failure
(cond, nextBlock) match {
- case (EQ, Success) =>
- jcode emitIFNONNULL labels(failure)
- case (NE, Failure) =>
- jcode emitIFNONNULL labels(success)
- case (EQ, Failure) =>
- jcode emitIFNULL labels(success)
- case (NE, Success) =>
- jcode emitIFNULL labels(failure)
+ case (EQ, Success) => jcode emitIFNONNULL labels(failure)
+ case (NE, Failure) => jcode emitIFNONNULL labels(success)
+ case (EQ, Failure) => jcode emitIFNULL labels(success)
+ case (NE, Success) => jcode emitIFNULL labels(failure)
case (EQ, _) =>
jcode emitIFNULL labels(success)
jcode.emitGOTO_maybe_W(labels(failure), false)
@@ -1484,11 +1516,11 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
jcode emitIFNONNULL labels(success)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
-
- case _ =>
+ } else {
(kind: @unchecked) match {
case LONG =>
- jcode.emitLCONST_0(); jcode.emitLCMP()
+ jcode.emitLCONST_0()
+ jcode.emitLCMP()
case FLOAT =>
jcode.emitFCONST_0()
if (cond == LT || cond == LE) jcode.emitFCMPG()
@@ -1499,263 +1531,254 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
else jcode.emitDCMPL()
}
if (nextBlock == success) {
- jcode.emitIF(conds(negate(cond)), labels(failure))
+ jcode.emitIF(conds(cond.negate()), labels(failure))
} else {
jcode.emitIF(conds(cond), labels(success))
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
- }
+ }
- case RETURN(kind) =>
- jcode emitRETURN javaType(kind)
+ case RETURN(kind) => jcode emitRETURN javaType(kind)
- case THROW(_) =>
- jcode.emitATHROW()
+ case THROW(_) => jcode.emitATHROW()
- case DROP(kind) =>
- kind match {
- case LONG | DOUBLE => jcode.emitPOP2()
- case _ => jcode.emitPOP()
- }
+ case DROP(kind) =>
+ if(kind.isWideType) jcode.emitPOP2()
+ else jcode.emitPOP()
- case DUP(kind) =>
- kind match {
- case LONG | DOUBLE => jcode.emitDUP2()
- case _ => jcode.emitDUP()
- }
+ case DUP(kind) =>
+ if(kind.isWideType) jcode.emitDUP2()
+ else jcode.emitDUP()
- case MONITOR_ENTER() =>
- jcode.emitMONITORENTER()
+ case MONITOR_ENTER() => jcode.emitMONITORENTER()
- case MONITOR_EXIT() =>
- jcode.emitMONITOREXIT()
+ case MONITOR_EXIT() => jcode.emitMONITOREXIT()
- case SCOPE_ENTER(lv) =>
- varsInBlock += lv
- lv.start = jcode.getPC()
+ case SCOPE_ENTER(lv) =>
+ varsInBlock += lv
+ lv.start = jcode.getPC()
- case SCOPE_EXIT(lv) =>
- if (varsInBlock(lv)) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- varsInBlock -= lv
- }
- else if (b.varsInScope(lv)) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
- b.varsInScope -= lv
- }
- else dumpMethodAndAbort(method, "Illegal local var nesting")
+ case SCOPE_EXIT(lv) =>
+ if (varsInBlock(lv)) {
+ lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
+ varsInBlock -= lv
+ }
+ else if (b.varsInScope(lv)) {
+ lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
+ b.varsInScope -= lv
+ }
+ else dumpMethodAndAbort(method, "Illegal local var nesting")
- case LOAD_EXCEPTION(_) =>
- ()
- }
+ case LOAD_EXCEPTION(_) =>
+ ()
+ }
- crtPC = jcode.getPC()
+ crtPC = jcode.getPC()
-// assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match")
-// val crtLine = instr.pos.line.get(lastLineNr);
+ // assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match")
+ // val crtLine = instr.pos.line.get(lastLineNr);
- val crtLine = try {
- if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception
- } catch {
- case _: UnsupportedOperationException =>
- log("Warning: wrong position in: " + method)
- lastLineNr
- }
+ val crtLine = try {
+ if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception
+ } catch {
+ case _: UnsupportedOperationException =>
+ log("Warning: wrong position in: " + method)
+ lastLineNr
+ }
- if (b.lastInstruction == instr)
- endPC(b) = jcode.getPC()
+ if (instr eq lastInstr) { endPC(b) = jcode.getPC() }
- //System.err.println("CRTLINE: " + instr.pos + " " +
- // /* (if (instr.pos < clasz.cunit.source.content.length) clasz.cunit.source.content(instr.pos) else '*') + */ " " + crtLine);
+ //System.err.println("CRTLINE: " + instr.pos + " " +
+ // /* (if (instr.pos < clasz.cunit.source.content.length) clasz.cunit.source.content(instr.pos) else '*') + */ " " + crtLine);
- if (crtPC > lastMappedPC) {
- jcode.completeLineNumber(lastMappedPC, crtPC, crtLine)
- lastMappedPC = crtPC
- lastLineNr = crtLine
+ if (crtPC > lastMappedPC) {
+ jcode.completeLineNumber(lastMappedPC, crtPC, crtLine)
+ lastMappedPC = crtPC
+ lastLineNr = crtLine
+ }
}
- }
- // local vars that survived this basic block
- for (lv <- varsInBlock) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- }
- for (lv <- b.varsInScope) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
+ // local vars that survived this basic block
+ for (lv <- varsInBlock) {
+ lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
+ }
+ for (lv <- b.varsInScope) {
+ lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
+ }
}
- }
- /**
- * @param primitive ...
- * @param pos ...
- */
- def genPrimitive(primitive: Primitive, pos: Position) {
- primitive match {
- case Negation(kind) =>
- kind match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitINEG()
- case LONG => jcode.emitLNEG()
- case FLOAT => jcode.emitFNEG()
- case DOUBLE => jcode.emitDNEG()
- case _ => abort("Impossible to negate a " + kind)
- }
-
- case Arithmetic(op, kind) =>
- op match {
- case ADD => jcode.emitADD(javaType(kind))
- case SUB =>
- (kind: @unchecked) match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitISUB()
- case LONG => jcode.emitLSUB()
- case FLOAT => jcode.emitFSUB()
- case DOUBLE => jcode.emitDSUB()
+ /**
+ * @param primitive ...
+ * @param pos ...
+ */
+ def genPrimitive(primitive: Primitive, pos: Position) {
+ primitive match {
+ case Negation(kind) =>
+ if(kind.isIntSizedType) { jcode.emitINEG() }
+ else {
+ kind match {
+ case LONG => jcode.emitLNEG()
+ case FLOAT => jcode.emitFNEG()
+ case DOUBLE => jcode.emitDNEG()
+ case _ => abort("Impossible to negate a " + kind)
}
+ }
- case MUL =>
- (kind: @unchecked) match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitIMUL()
- case LONG => jcode.emitLMUL()
- case FLOAT => jcode.emitFMUL()
- case DOUBLE => jcode.emitDMUL()
- }
+ case Arithmetic(op, kind) =>
+ op match {
+ case ADD =>
+ if(kind.isIntSizedType) { jcode.emitIADD() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLADD()
+ case FLOAT => jcode.emitFADD()
+ case DOUBLE => jcode.emitDADD()
+ }
+ }
- case DIV =>
- (kind: @unchecked) match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitIDIV()
- case LONG => jcode.emitLDIV()
- case FLOAT => jcode.emitFDIV()
- case DOUBLE => jcode.emitDDIV()
- }
+ case SUB =>
+ if(kind.isIntSizedType) { jcode.emitISUB() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLSUB()
+ case FLOAT => jcode.emitFSUB()
+ case DOUBLE => jcode.emitDSUB()
+ }
+ }
- case REM =>
- (kind: @unchecked) match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitIREM()
- case LONG => jcode.emitLREM()
- case FLOAT => jcode.emitFREM()
- case DOUBLE => jcode.emitDREM()
- }
+ case MUL =>
+ if(kind.isIntSizedType) { jcode.emitIMUL() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLMUL()
+ case FLOAT => jcode.emitFMUL()
+ case DOUBLE => jcode.emitDMUL()
+ }
+ }
- case NOT =>
- kind match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
+ case DIV =>
+ if(kind.isIntSizedType) { jcode.emitIDIV() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLDIV()
+ case FLOAT => jcode.emitFDIV()
+ case DOUBLE => jcode.emitDDIV()
+ }
+ }
+
+ case REM =>
+ if(kind.isIntSizedType) { jcode.emitIREM() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLREM()
+ case FLOAT => jcode.emitFREM()
+ case DOUBLE => jcode.emitDREM()
+ }
+ }
+
+ case NOT =>
+ if(kind.isIntSizedType) {
jcode.emitPUSH(-1)
jcode.emitIXOR()
- case LONG =>
+ } else if(kind == LONG) {
jcode.emitPUSH(-1l)
jcode.emitLXOR()
- case _ =>
+ } else {
abort("Impossible to negate an " + kind)
- }
-
- case _ =>
- abort("Unknown arithmetic primitive " + primitive)
- }
-
- case Logical(op, kind) => (op, kind) match {
- case (AND, LONG) =>
- jcode.emitLAND()
- case (AND, INT) =>
- jcode.emitIAND()
- case (AND, _) =>
- jcode.emitIAND()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (OR, LONG) =>
- jcode.emitLOR()
- case (OR, INT) =>
- jcode.emitIOR()
- case (OR, _) =>
- jcode.emitIOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (XOR, LONG) =>
- jcode.emitLXOR()
- case (XOR, INT) =>
- jcode.emitIXOR()
- case (XOR, _) =>
- jcode.emitIXOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
- }
-
- case Shift(op, kind) => (op, kind) match {
- case (LSL, LONG) =>
- jcode.emitLSHL()
- case (LSL, INT) =>
- jcode.emitISHL()
- case (LSL, _) =>
- jcode.emitISHL()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (ASR, LONG) =>
- jcode.emitLSHR()
- case (ASR, INT) =>
- jcode.emitISHR()
- case (ASR, _) =>
- jcode.emitISHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (LSR, LONG) =>
- jcode.emitLUSHR()
- case (LSR, INT) =>
- jcode.emitIUSHR()
- case (LSR, _) =>
- jcode.emitIUSHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
- }
+ }
- case Comparison(op, kind) => ((op, kind): @unchecked) match {
- case (CMP, LONG) => jcode.emitLCMP()
- case (CMPL, FLOAT) => jcode.emitFCMPL()
- case (CMPG, FLOAT) => jcode.emitFCMPG()
- case (CMPL, DOUBLE) => jcode.emitDCMPL()
- case (CMPG, DOUBLE) => jcode.emitDCMPL()
- }
+ case _ =>
+ abort("Unknown arithmetic primitive " + primitive)
+ }
- case Conversion(src, dst) =>
- debuglog("Converting from: " + src + " to: " + dst)
- if (dst == BOOL) {
- println("Illegal conversion at: " + clasz +
- " at: " + pos.source + ":" + pos.line)
- } else
- jcode.emitT2T(javaType(src), javaType(dst))
+ case Logical(op, kind) => (op, kind) match {
+ case (AND, LONG) => jcode.emitLAND()
+ case (AND, INT) => jcode.emitIAND()
+ case (AND, _) =>
+ jcode.emitIAND()
+ if (kind != BOOL)
+ jcode.emitT2T(javaType(INT), javaType(kind));
+
+ case (OR, LONG) => jcode.emitLOR()
+ case (OR, INT) => jcode.emitIOR()
+ case (OR, _) =>
+ jcode.emitIOR()
+ if (kind != BOOL)
+ jcode.emitT2T(javaType(INT), javaType(kind));
+
+ case (XOR, LONG) => jcode.emitLXOR()
+ case (XOR, INT) => jcode.emitIXOR()
+ case (XOR, _) =>
+ jcode.emitIXOR()
+ if (kind != BOOL)
+ jcode.emitT2T(javaType(INT), javaType(kind));
+ }
- case ArrayLength(_) =>
- jcode.emitARRAYLENGTH()
+ case Shift(op, kind) => (op, kind) match {
+ case (LSL, LONG) => jcode.emitLSHL()
+ case (LSL, INT) => jcode.emitISHL()
+ case (LSL, _) =>
+ jcode.emitISHL()
+ jcode.emitT2T(javaType(INT), javaType(kind))
+
+ case (ASR, LONG) => jcode.emitLSHR()
+ case (ASR, INT) => jcode.emitISHR()
+ case (ASR, _) =>
+ jcode.emitISHR()
+ jcode.emitT2T(javaType(INT), javaType(kind))
+
+ case (LSR, LONG) => jcode.emitLUSHR()
+ case (LSR, INT) => jcode.emitIUSHR()
+ case (LSR, _) =>
+ jcode.emitIUSHR()
+ jcode.emitT2T(javaType(INT), javaType(kind))
+ }
- case StartConcat =>
- jcode emitNEW StringBuilderClassName
- jcode.emitDUP()
- jcode.emitINVOKESPECIAL(StringBuilderClassName,
- JMethod.INSTANCE_CONSTRUCTOR_NAME,
- JMethodType.ARGLESS_VOID_FUNCTION)
-
- case StringConcat(el) =>
- val jtype = el match {
- case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
- case _ => javaType(el)
+ case Comparison(op, kind) => ((op, kind): @unchecked) match {
+ case (CMP, LONG) => jcode.emitLCMP()
+ case (CMPL, FLOAT) => jcode.emitFCMPL()
+ case (CMPG, FLOAT) => jcode.emitFCMPG()
+ case (CMPL, DOUBLE) => jcode.emitDCMPL()
+ case (CMPG, DOUBLE) => jcode.emitDCMPG()
}
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "append",
- new JMethodType(StringBuilderType,
- Array(jtype)))
- case EndConcat =>
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "toString",
- toStringType)
- case _ =>
- abort("Unimplemented primitive " + primitive)
+ case Conversion(src, dst) =>
+ debuglog("Converting from: " + src + " to: " + dst)
+ if (dst == BOOL) {
+ println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line)
+ } else
+ jcode.emitT2T(javaType(src), javaType(dst))
+
+ case ArrayLength(_) =>
+ jcode.emitARRAYLENGTH()
+
+ case StartConcat =>
+ jcode emitNEW StringBuilderClassName
+ jcode.emitDUP()
+ jcode.emitINVOKESPECIAL(StringBuilderClassName,
+ JMethod.INSTANCE_CONSTRUCTOR_NAME,
+ JMethodType.ARGLESS_VOID_FUNCTION)
+
+ case StringConcat(el) =>
+ val jtype = el match {
+ case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
+ case _ => javaType(el)
+ }
+ jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
+ "append",
+ new JMethodType(StringBuilderType,
+ Array(jtype)))
+ case EndConcat =>
+ jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
+ "toString",
+ toStringType)
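+ // Taken together, the three concat cases above compile e.g. `"" + x + y` into
+ // roughly `new StringBuilder().append("").append(x).append(y).toString()`
+ // (illustrative only; the exact append chain depends on the ICode emitted upstream).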
+
+ case _ =>
+ abort("Unimplemented primitive " + primitive)
+ }
}
- }
// genCode starts here
genBlocks(linearization)
@@ -1825,10 +1848,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
- def sizeOf(k: TypeKind): Int = k match {
- case DOUBLE | LONG => 2
- case _ => 1
- }
+ def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
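+ // (A long or double occupies two JVM local-variable slots, everything else one,
+ // so `isWideType` is assumed to hold exactly for LONG | DOUBLE.)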
def indexOf(m: IMethod, sym: Symbol): Int = {
val Some(local) = m lookupLocal sym
@@ -1845,9 +1865,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
* method. *Does not assume the parameters come first!*
*/
def computeLocalVarsIndex(m: IMethod) {
- var idx = 1
- if (m.symbol.isStaticMember)
- idx = 0;
+ var idx = if (m.symbol.isStaticMember) 0 else 1
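+ // (instance methods reserve JVM local slot 0 for `this`, hence their params start at 1)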
for (l <- m.params) {
debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
@@ -1906,6 +1924,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
((sym.rawflags & (Flags.FINAL | Flags.MODULE)) != 0)
&& !sym.enclClass.isInterface
&& !sym.isClassConstructor
+ && !sym.isMutable // fix for SI-3569; is it too broad?
)
mkFlags(
@@ -1914,9 +1933,10 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (sym.isInterface) ACC_INTERFACE else 0,
if (finalFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
- if (sym.isBridge) ACC_BRIDGE else 0,
+ if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
- if (sym.isVarargsMethod) ACC_VARARGS else 0
+ if (sym.isVarargsMethod) ACC_VARARGS else 0,
+ if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0
)
}
def javaFieldFlags(sym: Symbol) = {
@@ -1928,9 +1948,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
def isTopLevelModule(sym: Symbol): Boolean =
- atPhase (currentRun.picklerPhase.next) {
- sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
- }
+ afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
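+ // (The beforeXXX/afterXXX helpers used throughout this patch are assumed to wrap
+ // atPhase in the obvious way, e.g. roughly
+ // `@inline final def afterPickler[T](op: => T): T = atPhase(currentRun.picklerPhase.next)(op)`,
+ // mirroring the code each call replaces.)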
def isStaticModule(sym: Symbol): Boolean = {
sym.isModuleClass && !sym.isImplClass && !sym.isLifted
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
index 93d3d19ac8..b74981b999 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
@@ -54,14 +54,6 @@ trait GenJVMUtil {
LE -> JExtendedCode.COND_LE,
GE -> JExtendedCode.COND_GE
)
- val negate = immutable.Map[TestOp, TestOp](
- EQ -> NE,
- NE -> EQ,
- LT -> GE,
- GT -> LE,
- LE -> GT,
- GE -> LT
- )
/** Specialized array conversion to prevent calling
* java.lang.reflect.Array.newInstance via TraversableOnce.toArray
@@ -85,12 +77,10 @@ trait GenJVMUtil {
*/
def javaName(sym: Symbol): String =
javaNameCache.getOrElseUpdate(sym, {
- sym.name.newName(
- if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.javaBinaryName
- else
- sym.javaSimpleName
- )
+ if (sym.isClass || (sym.isModule && !sym.isMethod))
+ sym.javaBinaryName
+ else
+ sym.javaSimpleName
}).toString
def javaType(t: TypeKind): JType = (t: @unchecked) match {
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index d2e54ff3f1..2fb615f893 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -1125,7 +1125,7 @@ abstract class GenMSIL extends SubComponent {
}
// method: implicit view(FunctionX[PType0, PType1, ...,PTypeN, ResType]):DelegateType
- val (isDelegateView, paramType, resType) = atPhase(currentRun.typerPhase) {
+ val (isDelegateView, paramType, resType) = beforeTyper {
msym.tpe match {
case MethodType(params, resultType)
if (params.length == 1 && msym.name == nme.view_) =>
@@ -1954,7 +1954,7 @@ abstract class GenMSIL extends SubComponent {
} // createClassMembers0
private def isTopLevelModule(sym: Symbol): Boolean =
- atPhase (currentRun.refchecksPhase) {
+ beforeRefchecks {
sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index e8abee7d06..ff45bb8fd1 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -108,7 +108,7 @@ abstract class ClosureElimination extends SubComponent {
case LOAD_LOCAL(l) if info.bindings isDefinedAt LocalVar(l) =>
val t = info.getBinding(l)
t match {
- case Deref(LocalVar(_)) | Deref(This) | Const(_) =>
+ case Deref(This) | Const(_) =>
bb.replaceInstruction(i, valueToInstruction(t));
log("replaced " + i + " with " + t)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 5fc7329955..95c371fa8b 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -225,9 +225,9 @@ abstract class DeadCodeElimination extends SubComponent {
m foreachBlock { bb =>
assert(bb.closed, "Open block in computeCompensations")
- for ((i, idx) <- bb.toList.zipWithIndex) {
+ foreachWithIndex(bb.toList) { (i, idx) =>
if (!useful(bb)(idx)) {
- for ((consumedType, depth) <- i.consumedTypes.reverse.zipWithIndex) {
+ foreachWithIndex(i.consumedTypes.reverse) { (consumedType, depth) =>
log("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
val defs = rdef.findDefs(bb, idx, 1, depth)
for (d <- defs) {
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 66f802f74f..a734b2b92b 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -38,6 +38,33 @@ abstract class Inliners extends SubComponent {
res
}
+ /** Look up the implementation of method 'sym' in 'clazz'.
+ */
+ def lookupImplFor(sym: Symbol, clazz: Symbol): Symbol = {
+ // TODO: verify that clazz.superClass is equivalent here to clazz.tpe.parents(0).typeSymbol (.tpe vs .info)
+ def needsLookup = (
+ (clazz != NoSymbol)
+ && (clazz != sym.owner)
+ && !sym.isEffectivelyFinal
+ && clazz.isEffectivelyFinal
+ )
+ def lookup(clazz: Symbol): Symbol = {
+ // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
+ if (sym.owner == clazz || isBottomType(clazz)) sym
+ else sym.overridingSymbol(clazz) match {
+ case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
+ case imp => imp
+ }
+ }
+ if (needsLookup) {
+ val concreteMethod = lookup(clazz)
+ debuglog("\tlooked up method: " + concreteMethod.fullName)
+
+ concreteMethod
+ }
+ else sym
+ }
+
/* A warning threshold */
private final val MAX_INLINE_MILLIS = 2000
@@ -67,8 +94,7 @@ abstract class Inliners extends SubComponent {
try {
super.run()
} finally {
- inliner.NonPublicRefs.usesNonPublics.clear()
- inliner.recentTFAs.clear
+ inliner.clearCaches()
}
}
}
@@ -80,6 +106,21 @@ abstract class Inliners extends SubComponent {
def isClosureClass(cls: Symbol): Boolean =
cls.isFinal && cls.isSynthetic && !cls.isModuleClass && cls.isAnonymousFunction
+ /*
+ TODO: now that Inliner runs faster we could consider additional "monadic methods" (in the limit, all those taking a closure as their last argument).
+ Any "monadic method" occurring in a given caller C that is not recognized by `isMonadicMethod()` will prevent CloseElim from eliminating
+ any anonymous closure class whose instances are passed as arguments to C's invocations.
+ */
+ def isMonadicMethod(sym: Symbol) = {
+ nme.unspecializedName(sym.name) match {
+ case nme.foreach | nme.filter | nme.withFilter | nme.map | nme.flatMap => true
+ case _ => false
+ }
+ }
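+ // For example, assuming `unspecializedName` strips specialization suffixes,
+ // a callsite of `map$mcI$sp` is classified as monadic just like `map` itself.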
+
+ def hasInline(sym: Symbol) = sym hasAnnotation ScalaInlineClass
+ def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
+
/**
* Simple inliner.
*/
@@ -92,9 +133,6 @@ abstract class Inliners extends SubComponent {
}
import NonPublicRefs._
- private def hasInline(sym: Symbol) = sym hasAnnotation ScalaInlineClass
- private def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
-
/** The current iclass */
private var currentIClazz: IClass = _
private def warn(pos: Position, msg: String) = currentIClazz.cunit.warning(pos, msg)
@@ -121,6 +159,21 @@ abstract class Inliners extends SubComponent {
(hasRETURN, a)
}
+ def clearCaches() {
+ // methods
+ NonPublicRefs.usesNonPublics.clear()
+ recentTFAs.clear
+ tfa.knownUnsafe.clear()
+ tfa.knownSafe.clear()
+ tfa.knownNever.clear()
+ // basic blocks
+ tfa.preCandidates.clear()
+ tfa.relevantBBs.clear()
+ // callsites
+ tfa.remainingCALLs.clear()
+ tfa.isOnWatchlist.clear()
+ }
+
def analyzeClass(cls: IClass): Unit =
if (settings.inline.value) {
debuglog("Analyzing " + cls)
@@ -142,7 +195,38 @@ abstract class Inliners extends SubComponent {
val splicedBlocks = mutable.Set.empty[BasicBlock]
val staleIn = mutable.Set.empty[BasicBlock]
+ /**
+ * A transformation local to the body of the IMethod received as argument.
+ * An inlining decision consists of replacing a callsite with the body of the callee.
+ * Note that, because `analyzeMethod()` itself may modify a method's body,
+ * the particular callee bodies that end up being inlined depend on the order in which methods are visited
+ * (no topological ordering over the call-graph is attempted).
+ *
+ * Making an inlining decision requires type-flow information for both caller and callee.
+ * Regarding the caller, such information is needed only for basic blocks containing inlining candidates
+ * (and their transitive predecessors). This observation leads to using a custom type-flow analysis (MTFAGrowable)
+ * that can be re-inited, i.e. that reuses lattice elements (type-flow information) computed in a previous iteration
+ * as the starting point for faster convergence in a new iteration.
+ *
+ * The mechanics of inlining are iterative for a given invocation of `analyzeMethod(m)`,
+ * thus taking into account the basic blocks that successful inlinings added in previous iterations:
+ *
+ * (1) before the iterations proper start, so-called preinlining is performed.
+ * Those callsites whose (receiver, concreteMethod) are both known statically
+ * can be analyzed for inlining before computing a type-flow. Details in `preInline()`.
+ *
+ * (2) the first iteration computes type-flow information for basic blocks containing inlining candidates
+ * (and their transitive predecessors), the so-called `relevantBBs`.
+ * The ensuing analysis of each candidate (performed by `analyzeInc()`)
+ * may result in a CFG isomorphic to that of the callee being inserted where the callsite was
+ * (i.e. a CALL_METHOD instruction is replaced with a single-entry single-exit CFG, which we call "successful inlining").
+ *
+ * (3) following iterations have their relevant basic blocks updated to focus
+ * on the inlined basic blocks and their successors only. Details in `MTFAGrowable.reinit()`.
+ */
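+ /* In outline, the iteration below amounts to the following sketch (simplified,
+ * bookkeeping elided; `callsites(bb)` stands in for collecting the CALL_METHODs
+ * that `tfa.remainingCALLs` still tracks for that block):
+ *
+ * preInline(true); preInline(false) // priming, see above
+ * do {
+ * retry = false
+ * tfa.reinit(m, staleOut.toList, splicedBlocks, staleIn)
+ * tfa.run
+ * for (bb <- tfa.callerLin; if tfa.preCandidates(bb); cm <- callsites(bb))
+ * if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) retry = true
+ * } while (retry)
+ */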
def analyzeMethod(m: IMethod): Unit = {
+ // m.normalize
+
var sizeBeforeInlining = m.code.blockCount
var instrBeforeInlining = m.code.instructionCount
var retry = false
@@ -154,17 +238,53 @@ abstract class Inliners extends SubComponent {
val inlinedMethodCount = mutable.HashMap.empty[Symbol, Int] withDefaultValue 0
val caller = new IMethodInfo(m)
- var info: tfa.lattice.Elem = null
- def analyzeInc(msym: Symbol, i: Instruction, bb: BasicBlock): Boolean = {
- var inlined = false
- def paramTypes = msym.info.paramTypes
- val receiver = (info.stack.types drop paramTypes.length) match {
- case Nil => log("analyzeInc(" + msym + "), no type on the stack!") ; NoSymbol
- case REFERENCE(s) :: _ => s
- case _ => NoSymbol
+ def preInline(isFirstRound: Boolean): Int = {
+ val inputBlocks = caller.m.linearizedBlocks()
+ val callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]] = {
+ if(isFirstRound) tfa.conclusives else tfa.knownBeforehand
}
- val concreteMethod = lookupImplFor(msym, receiver)
+ inlineWithoutTFA(inputBlocks, callsites)
+ }
+
+ /**
+ * Inline straightforward callsites (those that can be inlined without a TFA).
+ *
+ * To perform inlining, all we need to know is listed as formal params of `analyzeInc()`:
+ * - the callsite and the block containing it
+ * - the actual (i.e. runtime) class of the receiver
+ * - the actual (i.e. runtime) method being invoked
+ * - the stack length just before the callsite (to check whether enough arguments have been pushed).
+ * The assert below lists the conditions under which no TFA is needed
+ * (the statically known receiver and method are both final, thus at runtime they cannot be anything other than those).
+ */
+ def inlineWithoutTFA(inputBlocks: Traversable[BasicBlock], callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]]): Int = {
+ var inlineCount = 0
+ import scala.util.control.Breaks._
+ for(x <- inputBlocks; val easyCake = callsites(x); if easyCake.nonEmpty) {
+ breakable {
+ for(ocm <- easyCake) {
+ assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal)
+ if(analyzeInc(ocm, x, ocm.method.owner, -1, ocm.method)) {
+ inlineCount += 1
+ break
+ }
+ }
+ }
+ }
+
+ inlineCount
+ }
+
+ /** Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
+ * at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
+ */
+ def analyzeInc(i: CALL_METHOD, bb: BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol): Boolean = {
+ var inlined = false
+ val msym = i.method
def warnNoInline(reason: String) = {
if (hasInline(msym) && !caller.isBridge)
@@ -209,7 +329,7 @@ abstract class Inliners extends SubComponent {
val inc = new IMethodInfo(callee)
val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
- if (pair isStampedForInlining info.stack) {
+ if (pair isStampedForInlining stackLength) {
retry = true
inlined = true
if (isCountable)
@@ -228,9 +348,9 @@ abstract class Inliners extends SubComponent {
}
else {
if (settings.debug.value)
- pair logFailure info.stack
+ pair logFailure stackLength
- warnNoInline(pair failureReason info.stack)
+ warnNoInline(pair failureReason stackLength)
}
case None =>
warnNoInline("bytecode was not available")
@@ -241,38 +361,96 @@ abstract class Inliners extends SubComponent {
if (!isAvailable) "bytecode was not available"
else "it can be overridden"
)
+
inlined
}
- import scala.util.control.Breaks._
+ /* Pre-inlining consists of invoking the usual inlining subroutine with (receiver class, concrete method) pairs as input
+ * where both method and receiver are final, which implies that the receiver computed via TFA will always match `concreteMethod.owner`.
+ *
+ * As with any invocation of `analyzeInc()`, the inlining outcome is based on heuristics which favor inlining an `isMonadicMethod` before other methods.
+ * That's why `preInline()` is invoked twice: any inlinings downplayed by the heuristics during the first round get an opportunity to rank higher during the second.
+ *
+ * As a whole, both `preInline()` invocations amount to priming the inlining process,
+ * so that the first TFA run afterwards can gain more information compared to a cold start.
+ */
+ val totalPreInlines = {
+ val firstRound = preInline(true)
+ if(firstRound == 0) 0 else (firstRound + preInline(false))
+ }
+ staleOut.clear()
+ splicedBlocks.clear()
+ staleIn.clear()
+
do {
retry = false
log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
+
+ /* It's important not to inline into unreachable basic blocks; linearizedBlocks() returns only reachable ones. */
+ tfa.callerLin = caller.m.linearizedBlocks()
+ /* TODO Do we want to perform inlining in non-finally exception handlers?
+ * It seems counterproductive (the larger the method, the less likely it is to be JIT-compiled).
+ * The alternative to the line above would be `linearizer.linearizeAt(caller.m, caller.m.startBlock)`.
+ * See also the comment on the same topic in TypeFlowAnalysis. */
+
tfa.reinit(m, staleOut.toList, splicedBlocks, staleIn)
tfa.run
staleOut.clear()
splicedBlocks.clear()
staleIn.clear()
- caller.m.linearizedBlocks() foreach { bb =>
- info = tfa in bb
-
+ import scala.util.control.Breaks._
+ for(bb <- tfa.callerLin; if tfa.preCandidates(bb)) {
+ val cms = bb.toList collect { case cm : CALL_METHOD => cm }
breakable {
- for (i <- bb) {
- i match {
- // Dynamic == normal invocations
- // Static(true) == calls to private members
- case CALL_METHOD(msym, Dynamic | Static(true)) if !msym.isConstructor =>
- if (analyzeInc(msym, i, bb)) {
- break
- }
- case _ => ()
+ for (cm <- cms; if tfa.remainingCALLs.isDefinedAt(cm)) {
+ val analysis.CallsiteInfo(_, receiver, stackLength, concreteMethod) = tfa.remainingCALLs(cm)
+ if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) {
+ break
}
- info = tfa.interpret(info, i)
}
}
+ }
+
+ /* As part of inlining, some instructions are moved to a new block.
+ * In detail: the instructions moved to a new block originally appeared after a (by now inlined) callsite.
+ * Their new home is an `afterBlock` created by `doInline()` to that effect.
+ * Each block in staleIn is one such `afterBlock`.
+ *
+ * Some of those instructions may be CALL_METHODs, possibly tracked in `remainingCALLs`
+ * (with an entry still noting the old containing block). However, that causes no problem:
+ *
+ * (1) such callsites won't be analyzed for inlining by `analyzeInc()` (*in this iteration*)
+ * because of the `break` that abandons the original basic block that contained them.
+ *
+ * (2) Additionally, their new containing block won't be visited either (*in this iteration*)
+ * because the new blocks don't show up in the linearization computed before inlinings started:
+ * `for(bb <- tfa.callerLin; if tfa.preCandidates(bb)) {`
+ *
+ * In the next iteration, the new home of any instructions that moved
+ * will be tracked properly in `remainingCALLs`, after `MTFAGrowable.reinit()` puts their new homes on its radar.
+ */
+ if(retry) {
+ for(afterBlock <- staleIn) {
+ val justCALLsAfter = afterBlock.toList collect { case c : opcodes.CALL_METHOD => c }
+ for(ia <- justCALLsAfter) { tfa.remainingCALLs.remove(ia) }
+ }
+ }
+ /*
+ if(splicedBlocks.nonEmpty) { // TODO explore (saves time but leads to slightly different inlining decisions)
+ // opportunistically perform straightforward inlinings before the next typeflow round
+ val savedRetry = retry
+ val savedStaleOut = staleOut.toSet; staleOut.clear()
+ val savedStaleIn = staleIn.toSet ; staleIn.clear()
+ val howmany = inlineWithoutTFA(splicedBlocks, tfa.knownBeforehand)
+ splicedBlocks ++= staleIn
+ staleOut.clear(); staleOut ++= savedStaleOut;
+ staleIn.clear(); staleIn ++= savedStaleIn;
+ retry = savedRetry
}
+ */
if (tfa.stat)
log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + caller.length + ")")
@@ -288,15 +466,10 @@ abstract class Inliners extends SubComponent {
}
}
- private def isMonadicMethod(sym: Symbol) = {
- nme.unspecializedName(sym.name) match {
- case nme.foreach | nme.filter | nme.withFilter | nme.map | nme.flatMap => true
- case _ => false
- }
- }
-
- private def isHigherOrderMethod(sym: Symbol) =
- sym.isMethod && atPhase(currentRun.erasurePhase.prev)(sym.info.paramTypes exists isFunctionType)
+ private def isHigherOrderMethod(sym: Symbol) = (
+ sym.isMethod
+ && beforeExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
+ )
/** Should method 'sym' being called in 'receiver' be loaded from disk? */
def shouldLoadImplFor(sym: Symbol, receiver: Symbol): Boolean = {
@@ -308,33 +481,6 @@ abstract class Inliners extends SubComponent {
res
}
- /** Look up implementation of method 'sym in 'clazz'.
- */
- def lookupImplFor(sym: Symbol, clazz: Symbol): Symbol = {
- // TODO: verify that clazz.superClass is equivalent here to clazz.tpe.parents(0).typeSymbol (.tpe vs .info)
- def needsLookup = (
- (clazz != NoSymbol)
- && (clazz != sym.owner)
- && !sym.isEffectivelyFinal
- && clazz.isEffectivelyFinal
- )
- def lookup(clazz: Symbol): Symbol = {
- // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
- if (sym.owner == clazz || isBottomType(clazz)) sym
- else sym.overridingSymbol(clazz) match {
- case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
- case imp => imp
- }
- }
- if (needsLookup) {
- val concreteMethod = lookup(clazz)
- debuglog("\tlooked up method: " + concreteMethod.fullName)
-
- concreteMethod
- }
- else sym
- }
-
class IMethodInfo(val m: IMethod) {
val sym = m.symbol
val name = sym.name
@@ -386,10 +532,13 @@ abstract class Inliners extends SubComponent {
/** Inline 'inc' into 'caller' at the given block and instruction.
* The instruction must be a CALL_METHOD.
*/
- def doInline(block: BasicBlock, instr: Instruction) {
+ def doInline(block: BasicBlock, instr: CALL_METHOD) {
staleOut += block
+ tfa.remainingCALLs.remove(instr) // this bookkeeping is done here and not in MTFAGrowable.reinit due to (1st) convenience and (2nd) necessity.
+ tfa.isOnWatchlist.remove(instr) // ditto
+
val targetPos = instr.pos
log("Inlining " + inc.m + " in " + caller.m + " at pos: " + posToStr(targetPos))
@@ -403,9 +552,9 @@ abstract class Inliners extends SubComponent {
val activeHandlers = caller.handlers filter (_ covered block)
/* Map 'original' blocks to the ones inlined in the caller. */
- val inlinedBlock: mutable.Map[BasicBlock, BasicBlock] = new mutable.HashMap
+ val inlinedBlock = mutable.Map[BasicBlock, BasicBlock]()
- val varsInScope: mutable.Set[Local] = mutable.HashSet() ++= block.varsInScope
+ val varsInScope = mutable.HashSet[Local]() ++= block.varsInScope
/** Side effects varsInScope when it sees SCOPE_ENTERs. */
def instrBeforeFilter(i: Instruction): Boolean = {
@@ -557,10 +706,11 @@ abstract class Inliners extends SubComponent {
if (settings.debug.value) icodes.checkValid(caller.m)
}
- def isStampedForInlining(stack: TypeStack) =
- !sameSymbols && inc.m.hasCode && shouldInline && isSafeToInline(stack)
+ def isStampedForInlining(stackLength: Int) =
+ !sameSymbols && inc.m.hasCode && shouldInline &&
+ isSafeToInline(stackLength) // `isSafeToInline()` must be invoked last in this AND expression because it mutates the `knownSafe` and `knownUnsafe` maps for good.
- def logFailure(stack: TypeStack) = log(
+ def logFailure(stackLength: Int) = log(
"""|inline failed for %s:
| pair.sameSymbols: %s
| inc.numInlined < 2: %s
@@ -569,13 +719,14 @@ abstract class Inliners extends SubComponent {
| shouldInline: %s
""".stripMargin.format(
inc.m, sameSymbols, inlinedMethodCount(inc.sym) < 2,
- inc.m.hasCode, isSafeToInline(stack), shouldInline
+ inc.m.hasCode, isSafeToInline(stackLength), shouldInline
)
)
- def failureReason(stack: TypeStack) =
+ def failureReason(stackLength: Int) =
if (!inc.m.hasCode) "bytecode was unavailable"
- else if (!isSafeToInline(stack)) "it is unsafe (target may reference private fields)"
+ else if (inc.m.symbol.hasFlag(Flags.SYNCHRONIZED)) "method is synchronized"
+ else if (!isSafeToInline(stackLength)) "it is unsafe (target may reference private fields)"
else "of a bug (run with -Ylog:inline -Ydebug for more information)"
def canAccess(level: NonPublicRefs.Value) = level match {
@@ -587,15 +738,26 @@ abstract class Inliners extends SubComponent {
private def sameOwner = caller.owner == inc.owner
/** A method is safe to inline when:
- * - it does not contain calls to private methods when
- * called from another class
+ * - it does not contain calls to private methods when called from another class
* - it is not inlined into a position with non-empty stack,
* while having a top-level finalizer (see liftedTry problem)
* - it is not recursive
* Note:
* - synthetic private members are made public in this pass.
*/
- def isSafeToInline(stack: TypeStack): Boolean = {
+ def isSafeToInline(stackLength: Int): Boolean = {
+
+ if(tfa.blackballed(inc.sym)) { return false }
+ if(tfa.knownSafe(inc.sym)) { return true }
+
+ if(helperIsSafeToInline(stackLength)) {
+ tfa.knownSafe += inc.sym; true
+ } else {
+ tfa.knownUnsafe += inc.sym; false
+ }
+ }
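+ // (Memoization: once a callee proves unsafe it lands in `knownUnsafe` and, via the
+ // `blackballed` check above, is presumably never re-analyzed during this run.)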
+
+ private def helperIsSafeToInline(stackLength: Int): Boolean = {
def makePublic(f: Symbol): Boolean =
(inc.m.sourceFile ne NoSourceFile) && (f.isSynthetic || f.isParamAccessor) && {
debuglog("Making not-private symbol out of synthetic: " + f)
@@ -604,8 +766,8 @@ abstract class Inliners extends SubComponent {
true
}
- if (!inc.m.hasCode || inc.isRecursive)
- return false
+ if (!inc.m.hasCode || inc.isRecursive) { return false }
+ if (inc.m.symbol.hasFlag(Flags.SYNCHRONIZED)) { return false }
val accessNeeded = usesNonPublics.getOrElseUpdate(inc.m, {
// Avoiding crashing the compiler if there are open blocks.
@@ -642,9 +804,10 @@ abstract class Inliners extends SubComponent {
})
canAccess(accessNeeded) && {
- val isIllegalStack = (stack.length > inc.minimumStack && inc.hasNonFinalizerHandler)
+ val isIllegalStack = (stackLength > inc.minimumStack && inc.hasNonFinalizerHandler)
+
!isIllegalStack || {
- debuglog("method " + inc.sym + " is used on a non-empty stack with finalizer. Stack: " + stack)
+ debuglog("method " + inc.sym + " is used on a non-empty stack with finalizer.")
false
}
}
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 089ef9cf35..176c00c025 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -18,7 +18,7 @@ abstract class Changes {
abstract class Change
- private lazy val annotationsChecked =
+ private lazy val annotationsChecked =
List(definitions.SpecializedClass) // Any others that should be checked?
private val flagsToCheck = IMPLICIT | FINAL | PRIVATE | PROTECTED | SEALED |
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
index bd890b7194..02be916f59 100644
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
@@ -145,10 +145,8 @@ trait DependencyAnalysis extends SubComponent with Files {
val name = d.toString
d.symbol match {
case s : ModuleClassSymbol =>
- val isTopLevelModule =
- atPhase (currentRun.picklerPhase.next) {
- !s.isImplClass && !s.isNestedClass
- }
+ val isTopLevelModule = afterPickler { !s.isImplClass && !s.isNestedClass }
+
if (isTopLevelModule && (s.companionModule != NoSymbol)) {
dependencies.emits(source, nameToFile(unit.source.file, name))
}
@@ -182,16 +180,18 @@ trait DependencyAnalysis extends SubComponent with Files {
|| (tree.symbol.sourceFile.path != file.path))
&& (!tree.symbol.isClassConstructor)) {
updateReferences(tree.symbol.fullName)
- atPhase(currentRun.uncurryPhase.prev) {
- checkType(tree.symbol.tpe)
- }
+ // was "at uncurryPhase.prev", which is actually non-deterministic
+ // because the continuations plugin may or may not supply uncurry's
+ // immediately preceding phase.
+ beforeRefchecks(checkType(tree.symbol.tpe))
}
tree match {
case cdef: ClassDef if !cdef.symbol.hasPackageFlag &&
!cdef.symbol.isAnonymousFunction =>
if (cdef.symbol != NoSymbol) buf += cdef.symbol
- atPhase(currentRun.erasurePhase.prev) {
+ // was "at erasurePhase.prev"
+ beforeExplicitOuter {
for (s <- cdef.symbol.info.decls)
s match {
case ts: TypeSymbol if !ts.isClass =>
@@ -202,9 +202,8 @@ trait DependencyAnalysis extends SubComponent with Files {
super.traverse(tree)
case ddef: DefDef =>
- atPhase(currentRun.typerPhase.prev) {
- checkType(ddef.symbol.tpe)
- }
+ // was "at typer.prev"
+ beforeTyper { checkType(ddef.symbol.tpe) }
super.traverse(tree)
case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556
if (!a.symbol.isConstructor &&
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index c21507ef45..4f05678d85 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -80,9 +80,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"selected.png",
"selected2-right.png",
"selected2.png",
- "unselected.png",
-
- "rootdoc.txt"
+ "unselected.png"
)
/** Generates the Scaladoc site for a model into the site root.
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index ec9f705f5a..496d004fd8 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -104,7 +104,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
case mb: NonTemplateMemberEntity if (mb.useCaseOf.isDefined) =>
mb.useCaseOf.get.inDefinitionTemplates
case _ =>
- if (inTpl == null)
+ if (inTpl == null)
makeRootPackage.toList
else
makeTemplate(sym.owner) :: (sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
@@ -123,14 +123,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else Public()
}
}
- def flags = {
+ def flags = {
val fgs = mutable.ListBuffer.empty[Paragraph]
if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
if (sym.isSealed) fgs += Paragraph(Text("sealed"))
if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
if (!sym.isTrait && (sym hasFlag Flags.DEFERRED)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
- fgs.toList
+ fgs.toList
}
def deprecation =
if (sym.isDeprecated)
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index f2d59206e0..1b91b06942 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -10,6 +10,8 @@ import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.{SourceFile, Position, WorkScheduler}
import scala.tools.nsc.symtab._
import scala.tools.nsc.ast._
+import scala.tools.nsc.util.FailedInterrupt
+import scala.tools.nsc.util.EmptyAction
/** Interface of the interactive compiler to a client such as an IDE.
* The model of the presentation compiler consists of the following parts:
@@ -48,7 +50,7 @@ trait CompilerControl { self: Global =>
/** The scheduler by which client and compiler communicate
* Must be initialized before starting compilerRunner
*/
- protected[interactive] val scheduler = new WorkScheduler
+ @volatile protected[interactive] var scheduler = new WorkScheduler
/** Return the compilation unit attached to a source file, or None
* if source is not loaded.
@@ -374,6 +376,25 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
+ /** A do-nothing work scheduler that responds immediately with MissingResponse.
+ *
+ * Used during compiler shutdown.
+ */
+ class NoWorkScheduler extends WorkScheduler {
+
+ override def postWorkItem(action: Action) = synchronized {
+ action match {
+ case w: WorkItem => w.raiseMissing()
+ case e: EmptyAction => // do nothing
+ case _ => println("don't know what to do with this " + action.getClass)
+ }
+ }
+
+ override def doQuickly[A](op: () => A): A = {
+ throw new FailedInterrupt(new Exception("Posted a work item to a compiler that's shutting down"))
+ }
+ }
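+ // (Installed on shutdown by interactive/Global.scala, see below:
+ // `scheduler = new NoWorkScheduler` replaces the live scheduler.)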
+
}
// ---------------- Interpreted exceptions -------------------
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 477cec8c8e..166b38f503 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -357,6 +357,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
checkNoResponsesOutstanding()
log.flush();
+ scheduler = new NoWorkScheduler
throw ShutdownReq
}
@@ -609,6 +610,15 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
response raise ex
throw ex
+
+ case ex @ ShutdownReq =>
+ if (debugIDE) {
+ println("ShutdownReq thrown during response")
+ ex.printStackTrace()
+ }
+ response raise ex
+ throw ex
+
case ex =>
if (debugIDE) {
println("exception thrown during response: "+ex)
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index f251fd83fb..bad181eb76 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -22,6 +22,7 @@ import scala.tools.util.PathResolver
* changes require a compilation. It repeats this process until
* a fixpoint is reached.
*/
+@deprecated("Use sbt incremental compilation mechanism", "2.10.0")
class RefinedBuildManager(val settings: Settings) extends Changes with BuildManager {
class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) {
@@ -47,7 +48,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
val compiler = newCompiler(settings)
- import compiler.{Symbol, Type, atPhase, currentRun}
+ import compiler.{ Symbol, Type, beforeErasure }
import compiler.dependencyAnalysis.Inherited
private case class SymWithHistory(sym: Symbol, befErasure: Type)
@@ -159,10 +160,8 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
isCorrespondingSym(s.sym, sym)) match {
case Some(SymWithHistory(oldSym, info)) =>
val changes = changeSet(oldSym.info, sym)
- val changesErasure =
- atPhase(currentRun.erasurePhase.prev) {
- changeSet(info, sym)
- }
+ val changesErasure = beforeErasure(changeSet(info, sym))
+
changesOf(oldSym) = (changes ++ changesErasure).distinct
case _ =>
// a new top level definition
@@ -332,11 +331,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
for (src <- files; localDefs = compiler.dependencyAnalysis.definitions(src)) {
definitions(src) = (localDefs map (s => {
this.classes += s.fullName -> src
- SymWithHistory(
- s.cloneSymbol,
- atPhase(currentRun.erasurePhase.prev) {
- s.info.cloneInfo(s)
- })
+ SymWithHistory(s.cloneSymbol, beforeErasure(s.info.cloneInfo(s)))
}))
}
this.references = compiler.dependencyAnalysis.references
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index 9f5fde70d8..68c8f2fdb8 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -62,7 +62,7 @@ trait ExprTyper {
else Some(trees)
}
}
- def tokens(line: String) = beSilentDuring(codeParser.tokens(line))
+ def tokens(line: String) = beQuietDuring(codeParser.tokens(line))
// TODO: integrate these into a CodeHandler[Type].
@@ -92,7 +92,7 @@ trait ExprTyper {
case _ => NoType
}
}
-
+
def evaluate(): Type = {
typeOfExpressionDepth += 1
try typeOfTerm(expr) orElse asModule orElse asExpr orElse asQualifiedImport
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
index 7c71438b98..e1ea69842f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
@@ -324,7 +324,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private def implicitsCommand(line: String): Result = {
val intp = ILoop.this.intp
import intp._
- import global.Symbol
+ import global.{ Symbol, afterTyper }
def p(x: Any) = intp.reporter.printMessage("" + x)
@@ -348,7 +348,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
// This groups the members by where the symbol is defined
val byOwner = syms groupBy (_.owner)
- val sortedOwners = byOwner.toList sortBy { case (owner, _) => intp.afterTyper(source.info.baseClasses indexOf owner) }
+ val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) }
sortedOwners foreach {
case (owner, members) =>
@@ -382,7 +382,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private def findToolsJar() = {
val jdkPath = Directory(jdkHome)
val jar = jdkPath / "lib" / "tools.jar" toFile;
-
+
if (jar isFile)
Some(jar)
else if (jdkPath.isDirectory)
@@ -440,7 +440,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
else {
val tp = intp.typeOfExpression(line, false)
if (tp == NoType) "" // the error message was already printed
- else intp.afterTyper(tp.toString)
+ else intp.global.afterTyper(tp.toString)
}
}
private def warningsCommand(): Result = {
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index 6ae8d0e7d0..9a12bc1471 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -230,9 +230,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
} with MemberHandlers
import memberHandlers._
- def atPickler[T](op: => T): T = atPhase(currentRun.picklerPhase)(op)
- def afterTyper[T](op: => T): T = atPhase(currentRun.typerPhase.next)(op)
-
/** Temporarily be quiet */
def beQuietDuring[T](body: => T): T = {
val saved = printResults
@@ -787,10 +784,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
- def lineAfterTyper[T](op: => T): T = {
- assert(lastRun != null, "Internal error: trying to use atPhase, but Run is null." + this)
- atPhase(lastRun.typerPhase.next)(op)
- }
/** The innermost object inside the wrapper, found by
* following accessPath into the outer one.
@@ -799,7 +792,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val readRoot = getRequiredModule(readPath) // the outermost wrapper
(accessPath split '.').foldLeft(readRoot) { (sym, name) =>
if (name == "") sym else
- lineAfterTyper(sym.info member newTermName(name))
+ afterTyper(sym.info member newTermName(name))
}
}
/** We get a bunch of repeated warnings for reasons I haven't
@@ -842,7 +835,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
// private
class Request(val line: String, val trees: List[Tree]) {
val lineRep = new ReadEvalPrint()
- import lineRep.lineAfterTyper
private var _originalLine: String = null
def withOriginalLine(s: String): this.type = { _originalLine = s ; this }
@@ -906,11 +898,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
if (!handlers.last.definesValue) ""
else handlers.last.definesTerm match {
case Some(vname) if typeOf contains vname =>
- """
- |lazy val %s = {
- | %s
- | %s
- |}""".stripMargin.format(lineRep.resultName, lineRep.printName, fullPath(vname))
+ "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
case _ => ""
}
// first line evaluates object to make sure constructor is run
@@ -956,7 +944,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
typesOfDefinedTerms
// compile the result-extraction object
- beSilentDuring {
+ beQuietDuring {
savingSettings(_.nowarn.value = true) {
lineRep compile ResultObjectSourceCode(handlers)
}
@@ -965,7 +953,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
- def applyToResultMember[T](name: Name, f: Symbol => T) = lineAfterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
+ def applyToResultMember[T](name: Name, f: Symbol => T) = afterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
/* typeOf lookup with encoding */
def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
index d34ca8bbca..d579e0369e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
@@ -61,7 +61,7 @@ trait Imports {
def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
def implicitSymbols = importedSymbols filter (_.isImplicit)
- def importedTermNamed(name: String): Symbol =
+ def importedTermNamed(name: String): Symbol =
importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
/** Tuples of (source, imported symbols) in the order they were imported.
@@ -191,5 +191,5 @@ trait Imports {
prevRequestList flatMap (req => req.handlers map (req -> _))
private def membersAtPickler(sym: Symbol): List[Symbol] =
- atPickler(sym.info.nonPrivateMembers)
+ beforePickler(sym.info.nonPrivateMembers)
} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
index d96e8b07fc..f9c1907696 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -18,7 +18,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
import global._
import definitions.{ PredefModule, RootClass, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage, getModuleIfDefined }
type ExecResult = Any
- import intp.{ debugging, afterTyper }
+ import intp.{ debugging }
// verbosity goes up with consecutive tabs
private var verbosity: Int = 0
@@ -61,7 +61,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def packageNames = packages map tos
def aliasNames = aliases map tos
}
-
+
object NoTypeCompletion extends TypeMemberCompletion(NoType) {
override def memberNamed(s: String) = NoSymbol
override def members = Nil
@@ -165,11 +165,11 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
override def follow(id: String): Option[CompletionAware] = {
if (!completions(0).contains(id))
return None
-
+
val tpe = intp typeOfExpression id
if (tpe == NoType)
return None
-
+
def default = Some(TypeMemberCompletion(tpe))
// only rebinding vals in power mode for now.
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
index 37dd032135..68bfeafbc6 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -13,7 +13,7 @@ import scala.reflect.internal.Chars
trait MemberHandlers {
val intp: IMain
- import intp.{ Request, global, naming, atPickler }
+ import intp.{ Request, global, naming }
import global._
import naming._
@@ -118,8 +118,9 @@ trait MemberHandlers {
class DefHandler(member: DefDef) extends MemberDefHandler(member) {
private def vparamss = member.vparamss
- // true if 0-arity
- override def definesValue = vparamss.isEmpty || vparamss.head.isEmpty
+ private def isMacro = member.mods.hasFlag(scala.reflect.internal.Flags.MACRO)
+ // true if not a macro and 0-arity
+ override def definesValue = !isMacro && (vparamss.isEmpty || vparamss.head.isEmpty)
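+ // e.g. a hypothetical `def now: Long = macro nowImpl` is 0-arity but expands at
+ // compile time, so there is no runtime value for the REPL to extract and print.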
override def resultExtractionCode(req: Request) =
if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
}
@@ -199,10 +200,10 @@ trait MemberHandlers {
def importedSymbols = individualSymbols ++ wildcardSymbols
lazy val individualSymbols: List[Symbol] =
- atPickler(individualNames map (targetType nonPrivateMember _))
+ beforePickler(individualNames map (targetType nonPrivateMember _))
lazy val wildcardSymbols: List[Symbol] =
- if (importsWildcard) atPickler(targetType.nonPrivateMembers)
+ if (importsWildcard) beforePickler(targetType.nonPrivateMembers)
else Nil
/** Complete list of names imported by a wildcard */
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
index 835fbb5638..14876425f4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala
@@ -15,6 +15,31 @@ import scala.io.Codec
import java.net.{ URL, MalformedURLException }
import io.{ Path }
+/** Collecting some power mode examples.
+
+scala> trait F[@specialized(Int) T] { def f: T = ??? }
+defined trait F
+
+scala> trait G[@specialized(Long, Int) T] extends F[T] { override def f: T = super.f }
+defined trait G
+
+scala> changesAfterEachPhase(intp("G").info.members filter (_.name.toString contains "super")) >
+Gained after 1/parser {
+ method super$f
+}
+
+Gained after 12/specialize {
+ method super$f$mcJ$sp
+ method super$f$mcI$sp
+}
+
+Lost after 18/flatten {
+ method super$f$mcJ$sp
+ method super$f$mcI$sp
+ method super$f
+}
+*/
+
/** A class for methods to be injected into the intp in power mode.
*/
class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: ReplValsImpl) {
@@ -130,7 +155,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
( rutil.info[ReplValsImpl].membersDeclared
filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor)
- sortBy (_.decodedName)
+ sortBy (_.decodedName)
map to_str
mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "")
)
@@ -140,7 +165,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
implicit def apply[T: Manifest] : InternalInfo[T] = new InternalInfo[T](None)
}
object InternalInfo extends LowPriorityInternalInfo { }
-
+
/** Now dealing with the problem of accidentally calling a method on Type
* when you're holding a Symbol and seeing the Symbol converted to the
* type of Symbol rather than the type of the thing represented by the
@@ -151,7 +176,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
implicit def apply[T: Manifest] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
}
object InternalInfoWrapper extends LowPriorityInternalInfoWrapper {
-
+
}
class InternalInfoWrapper[T: Manifest](value: Option[T] = None) {
def ? : InternalInfo[T] = new InternalInfo[T](value)
@@ -165,7 +190,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
private def newInfo[U: Manifest](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
private def isImplClass(s: Symbol) = s.name.toString endsWith "$class"
-
+
/** Standard noise reduction filter. */
def excludeMember(s: Symbol) = (
isSpecialized(s)
@@ -193,7 +218,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
def membersInherited = members filterNot (membersDeclared contains _)
def memberTypes = members filter (_.name.isTypeName)
def memberMethods = members filter (_.isMethod)
-
+
def pkg = symbol.enclosingPackage
def pkgName = pkg.fullName
def pkgClass = symbol.enclosingPackageClass
@@ -318,12 +343,12 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
def sigs = syms map (_.defString)
def infos = syms map (_.info)
}
-
+
trait Implicits1 {
// fallback
implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
new SinglePrettifierClass[T](x)
-
+
implicit def liftToTypeName(s: String): TypeName = newTypeName(s)
}
trait Implicits2 extends Implicits1 {
@@ -350,7 +375,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
-
+
implicit def liftToTermName(s: String): TermName = newTermName(s)
implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala b/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
index dac20ad348..130af990ad 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
@@ -14,7 +14,10 @@ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.i
// Avoiding deadlock if the compiler starts logging before
// the lazy val is complete.
if (intp.isInitializeComplete) {
- if (intp.totalSilence) ()
+ if (intp.totalSilence) {
+ if (isReplTrace)
+ super.printMessage("[silent] " + msg)
+ }
else super.printMessage(msg)
}
else Console.println("[init] " + msg)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
index 6e5dec4205..a68392f0fb 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
@@ -50,7 +50,7 @@ object ReplVals {
def mkManifestToType[T <: Global](global: T) = {
import global._
import definitions._
-
+
/** We can't use definitions.manifestToType directly because we're passing
* it to map and the compiler refuses to perform eta expansion on a method
* with a dependent return type. (Can this be relaxed?) To get around this, we use the forwarding method below.
@@ -59,15 +59,17 @@ object ReplVals {
*/
def manifestToType(m: OptManifest[_]): Global#Type =
definitions.manifestToType(m)
-
+
class AppliedTypeFromManifests(sym: Symbol) {
def apply[M](implicit m1: Manifest[M]): Type =
- appliedType(sym.typeConstructor, List(m1) map (x => manifestToType(x).asInstanceOf[Type]))
+ if (sym eq NoSymbol) NoType
+ else appliedType(sym.typeConstructor, List(m1) map (x => manifestToType(x).asInstanceOf[Type]))
def apply[M1, M2](implicit m1: Manifest[M1], m2: Manifest[M2]): Type =
- appliedType(sym.typeConstructor, List(m1, m2) map (x => manifestToType(x).asInstanceOf[Type]))
+ if (sym eq NoSymbol) NoType
+ else appliedType(sym.typeConstructor, List(m1, m2) map (x => manifestToType(x).asInstanceOf[Type]))
}
-
+
(sym: Symbol) => new AppliedTypeFromManifests(sym)
}
}
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
index 5bb8bdda35..80b6e086da 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -165,7 +165,7 @@ object Pickler {
def pkl[T: Pickler] = implicitly[Pickler[T]]
/** A class representing `~`-pairs */
- case class ~[S, T](fst: S, snd: T)
+ case class ~[+S, +T](fst: S, snd: T)
/** A wrapper class to be able to use `~` as an infix method */
class TildeDecorator[S](x: S) {
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 0c94e40d68..06b06c50a6 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -393,8 +393,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
// assumed true unless we see public/private/protected
var isPackageAccess = true
var annots: List[Tree] = Nil
- def addAnnot(sym: Symbol) =
- annots :+= New(TypeTree(sym.tpe), List(Nil))
+ def addAnnot(sym: Symbol) = annots :+= New(sym.tpe)
while (true) {
in.token match {
@@ -654,15 +653,12 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
// leaves auxiliary constructors unable to access members of the companion object
// as unqualified identifiers.
def addCompanionObject(statics: List[Tree], cdef: ClassDef): List[Tree] = {
- def implWithImport(importStmt: Tree) = {
- import cdef.impl._
- treeCopy.Template(cdef.impl, parents, self, importStmt :: body)
- }
+ def implWithImport(importStmt: Tree) = deriveTemplate(cdef.impl)(importStmt :: _)
// if there are no statics we can use the original cdef, but we always
// create the companion so import A._ is not an error (see ticket #1700)
val cdefNew =
if (statics.isEmpty) cdef
- else treeCopy.ClassDef(cdef, cdef.mods, cdef.name, cdef.tparams, implWithImport(importCompanionObject(cdef)))
+ else deriveClassDef(cdef)(_ => implWithImport(importCompanionObject(cdef)))
List(makeCompanionObject(cdefNew, statics), cdefNew)
}
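The rewrites above replace field-by-field `treeCopy` calls with `deriveTemplate`/`deriveClassDef`. As a hedged illustration of the pattern (a toy AST, not the compiler's Trees API), a derive-style helper copies a node and transforms only the field of interest:

    object DeriveDemo {
      case class Template(parents: List[String], body: List[String])
      case class ClassDef(name: String, impl: Template)

      def deriveTemplate(t: Template)(f: List[String] => List[String]) =
        t.copy(body = f(t.body))
      def deriveClassDef(cd: ClassDef)(f: Template => Template) =
        cd.copy(impl = f(cd.impl))

      def main(args: Array[String]): Unit = {
        val cd = ClassDef("A", Template(List("AnyRef"), List("def x = 1")))
        // Prepend a statement to the template body, as addCompanionObject
        // does with the companion-object import.
        println(deriveClassDef(cd)(impl => deriveTemplate(impl)("import A._" :: _)))
      }
    }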
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index 5e46960d04..371f4bc4d8 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -115,6 +115,10 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
println(fmt.format(xs: _*) + " == " + x)
x
}
+ private[nsc] def debugging[T](fmt: String, xs: Any*)(x: T): T = {
+ if (settings.debug.value) printing(fmt, xs: _*)(x)
+ else x
+ }
def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n"
def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString
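The `debugging` combinator added above threads a value through unchanged and prints it only when debugging is enabled, so call sites stay expression-shaped. A standalone sketch with a plain boolean standing in for `settings.debug.value`:

    object DebuggingDemo {
      var debug = true

      def printing[T](fmt: String, xs: Any*)(x: T): T = {
        println(fmt.format(xs: _*) + " == " + x)
        x
      }
      def debugging[T](fmt: String, xs: Any*)(x: T): T =
        if (debug) printing(fmt, xs: _*)(x) else x

      def main(args: Array[String]): Unit = {
        // Prints "length of foo == 3" and still returns 3 for inline use.
        val n = debugging("length of %s", "foo")("foo".length)
        println(n) // 3
      }
    }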
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index d81f05cd51..e1ff88557e 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -198,6 +198,10 @@ trait Matrix extends MatrixAdditions {
class PatternVar(val lhs: Symbol, val rhs: Tree, val checked: Boolean) {
def sym = lhs
def tpe = lhs.tpe
+ if (checked)
+ lhs resetFlag NO_EXHAUSTIVE
+ else
+ lhs setFlag NO_EXHAUSTIVE
// See #1427 for an example of a crash which occurs unless we retype:
// in that instance there is an existential in the pattern.
@@ -207,11 +211,6 @@ trait Matrix extends MatrixAdditions {
override def toString() = "%s: %s = %s".format(lhs, tpe, rhs)
}
- /** Sets the rhs to EmptyTree, which makes the valDef ignored in Scrutinee.
- */
- def specialVar(lhs: Symbol, checked: Boolean) =
- new PatternVar(lhs, EmptyTree, checked)
-
/** Given a tree, creates a new synthetic variable of the same type
* and assigns the tree to it.
*/
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
index 24d3c38e74..e72a0007a0 100644
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
@@ -131,23 +131,11 @@ trait MatrixAdditions extends ast.TreeDSL {
import Flags.{ MUTABLE, ABSTRACT, SEALED }
- private case class Combo(index: Int, sym: Symbol) {
- val isBaseClass = sym.tpe.baseClasses.toSet
-
- // is this combination covered by the given pattern?
- def isCovered(p: Pattern) = {
- def coversSym = isBaseClass(decodedEqualsType(p.tpe).typeSymbol)
-
- cond(p.tree) {
- case _: UnApply | _: ArrayValue => true
- case x => p.isDefault || coversSym
- }
- }
- }
+ private case class Combo(index: Int, sym: Symbol) { }
/* True if the patterns in 'row' cover the given type symbol combination, and the row has no guard. */
private def rowCoversCombo(row: Row, combos: List[Combo]) =
- row.guard.isEmpty && (combos forall (c => c isCovered row.pats(c.index)))
+ row.guard.isEmpty && combos.forall(c => row.pats(c.index) covers c.sym)
private def requiresExhaustive(sym: Symbol) = {
(sym.isMutable) && // indicates that we have not yet checked exhaustivity
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 9d4c9b4411..1285e29d4a 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -745,7 +745,7 @@ trait ParallelMatching extends ast.TreeDSL
(others.head :: _column.tail, make(_tvars, _rows))
def mix() = {
- val newScrut = new Scrutinee(specialVar(_pv.sym, _pv.checked))
+ val newScrut = new Scrutinee(new PatternVar(_pv.sym, EmptyTree, _pv.checked))
PatternMatch(newScrut, _ncol) mkRule _nrep
}
}
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
index 5dd7d8f3ee..56297f0195 100644
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
@@ -19,9 +19,10 @@ trait PatternBindings extends ast.TreeDSL
import Debug._
/** EqualsPattern **/
- def isEquals(tpe: Type) = cond(tpe) { case TypeRef(_, EqualsPatternClass, _) => true }
+ def isEquals(tpe: Type) = tpe.typeSymbol == EqualsPatternClass
def mkEqualsRef(tpe: Type) = typeRef(NoPrefix, EqualsPatternClass, List(tpe))
- def decodedEqualsType(tpe: Type) = condOpt(tpe) { case TypeRef(_, EqualsPatternClass, List(arg)) => arg } getOrElse (tpe)
+ def decodedEqualsType(tpe: Type) =
+ if (tpe.typeSymbol == EqualsPatternClass) tpe.typeArgs.head else tpe
// A subtype test which creates fresh existentials for type
// parameters on the right hand side.
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index 18409cfffe..8bdf83fda4 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -26,19 +26,6 @@ trait Patterns extends ast.TreeDSL {
type PatternMatch = MatchMatrix#PatternMatch
private type PatternVar = MatrixContext#PatternVar
- // private def unapplyArgs(x: Any) = x match {
- // case UnApply(Apply(TypeApply(_, targs), args), _) => (targs map (_.symbol), args map (_.symbol))
- // case _ => (Nil, Nil)
- // }
- //
- // private def unapplyCall(x: Any) = x match {
- // case UnApply(t, _) => treeInfo.methPart(t).symbol
- // case _ => NoSymbol
- // }
-
- private lazy val dummyMethod =
- NoSymbol.newTermSymbol(newTermName("matching$dummy"))
-
// Fresh patterns
def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern)
def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree)
@@ -56,13 +43,14 @@ trait Patterns extends ast.TreeDSL {
case class VariablePattern(tree: Ident) extends NamePattern {
lazy val Ident(name) = tree
require(isVarPattern(tree) && name != nme.WILDCARD)
-
+ override def covers(sym: Symbol) = true
override def description = "%s".format(name)
}
// 8.1.1 (b)
case class WildcardPattern() extends Pattern {
def tree = EmptyTree
+ override def covers(sym: Symbol) = true
override def isDefault = true
override def description = "_"
}
@@ -71,6 +59,8 @@ trait Patterns extends ast.TreeDSL {
case class TypedPattern(tree: Typed) extends Pattern {
lazy val Typed(expr, tpt) = tree
+ override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe)
+ override def sufficientType = tpt.tpe
override def subpatternsForVars: List[Pattern] = List(Pattern(expr))
override def simplify(pv: PatternVar) = Pattern(expr) match {
case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr
@@ -115,6 +105,7 @@ trait Patterns extends ast.TreeDSL {
}
}
+ override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
override def description = backticked match {
case Some(s) => "this." + s
@@ -133,13 +124,15 @@ trait Patterns extends ast.TreeDSL {
case class ObjectPattern(tree: Apply) extends ApplyPattern { // NamePattern?
require(!fn.isType && isModule)
+ override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
override def sufficientType = tpe.narrow
override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
override def description = "Obj(%s)".format(fn)
}
// 8.1.4 (e)
case class SimpleIdPattern(tree: Ident) extends NamePattern {
- lazy val Ident(name) = tree
+ val Ident(name) = tree
+ override def covers(sym: Symbol) = newMatchesPattern(sym, tpe.narrow)
override def description = "Id(%s)".format(name)
}
@@ -163,6 +156,11 @@ trait Patterns extends ast.TreeDSL {
if (args.isEmpty) this rebindToEmpty tree.tpe
else this
+ override def covers(sym: Symbol) = {
+ debugging("[constructor] Does " + this + " cover " + sym + " ? ") {
+ sym.tpe.typeSymbol == this.tpe.typeSymbol
+ }
+ }
override def description = {
if (isColonColon) "%s :: %s".format(Pattern(args(0)), Pattern(args(1)))
else "%s(%s)".format(name, toPats(args).mkString(", "))
@@ -175,17 +173,12 @@ trait Patterns extends ast.TreeDSL {
// 8.1.7 / 8.1.8 (unapply and unapplySeq calls)
case class ExtractorPattern(tree: UnApply) extends UnapplyPattern {
- override def simplify(pv: PatternVar) = {
- if (pv.sym hasFlag NO_EXHAUSTIVE) ()
- else {
- TRACE("Setting NO_EXHAUSTIVE on " + pv.sym + " due to extractor " + tree)
- pv.sym setFlag NO_EXHAUSTIVE
- }
+ private def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe
+ override def simplify(pv: PatternVar) = {
if (pv.tpe <:< arg.tpe) this
else this rebindTo uaTyped
}
-
override def description = "Unapply(%s => %s)".format(necessaryType, resTypesString)
}
@@ -208,6 +201,7 @@ trait Patterns extends ast.TreeDSL {
private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
case _ =>
+ val dummyMethod = NoSymbol.newTermSymbol(newTermName("matching$dummy"))
val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
Apply(TypeTree(consType), List(hd, tl)) setType consRef
@@ -376,7 +370,7 @@ trait Patterns extends ast.TreeDSL {
case _: This if isVariableName(name) => Some("`%s`".format(name))
case _ => None
}
-
+ override def covers(sym: Symbol) = newMatchesPattern(sym, tree.tpe)
protected def getPathSegments(t: Tree): List[Name] = t match {
case Select(q, name) => name :: getPathSegments(q)
case Apply(f, Nil) => getPathSegments(f)
@@ -395,7 +389,13 @@ trait Patterns extends ast.TreeDSL {
lazy val UnApply(unfn, args) = tree
lazy val Apply(fn, _) = unfn
lazy val MethodType(List(arg, _*), _) = fn.tpe
- protected def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe
+
+ // Covers if the symbol matches the unapply method's argument type,
+ // and the return type of the unapply is Some.
+ override def covers(sym: Symbol) = newMatchesPattern(sym, arg.tpe)
+
+ // TODO: for alwaysCovers:
+ // fn.tpe.finalResultType.typeSymbol == SomeClass
override def necessaryType = arg.tpe
override def subpatternsForVars = args match {
@@ -419,6 +419,7 @@ trait Patterns extends ast.TreeDSL {
else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size)
def isConstructorPattern = fn.isType
+ override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe)
}
sealed abstract class Pattern extends PatternBindingLogic {
@@ -443,6 +444,15 @@ trait Patterns extends ast.TreeDSL {
// the subpatterns for this pattern (at the moment, that means constructor arguments)
def subpatterns(pm: MatchMatrix#PatternMatch): List[Pattern] = pm.dummies
+ // if this pattern should be considered to cover the given symbol
+ def covers(sym: Symbol): Boolean = newMatchesPattern(sym, sufficientType)
+ def newMatchesPattern(sym: Symbol, pattp: Type) = {
+ debugging("[" + kindString + "] Does " + pattp + " cover " + sym + " ? ") {
+ (sym.isModuleClass && (sym.tpe.typeSymbol eq pattp.typeSymbol)) ||
+ (sym.tpe.baseTypeSeq exists (_ matchesPattern pattp))
+ }
+ }
+
def sym = tree.symbol
def tpe = tree.tpe
def isEmpty = tree.isEmpty
@@ -475,6 +485,7 @@ trait Patterns extends ast.TreeDSL {
final override def toString = description
def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType)
+ def kindString = ""
}
/*** Extractors ***/
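The `covers`/`newMatchesPattern` methods introduced above decide whether a pattern can match values described by a given class symbol, which drives the exhaustiveness test in MatrixAdditions. A much-simplified standalone model (strings in place of Symbols and Types) of the base-type part of that check:

    object CoversDemo {
      // A class symbol is modeled by its base-class chain, most specific first.
      case class Sym(name: String, baseClasses: List[String])

      // A pattern of type pattp covers sym if some base type of the symbol
      // matches the pattern type (cf. newMatchesPattern above).
      def covers(sym: Sym, pattp: String): Boolean =
        sym.baseClasses contains pattp

      def main(args: Array[String]): Unit = {
        val someSym = Sym("Some", List("Some", "Option", "AnyRef", "Any"))
        println(covers(someSym, "Option")) // true
        println(covers(someSym, "List"))   // false
      }
    }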
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index f19a285d7c..309fc5733f 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -56,10 +56,10 @@ abstract class Reporter {
*/
def echo(msg: String): Unit = info(NoPosition, msg, true)
def echo(pos: Position, msg: String): Unit = info(pos, msg, true)
-
+
/** Informational messages, suppressed unless -verbose or force=true. */
def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
-
+
/** Warnings and errors. */
def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false))
def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false))
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index b468e7c0af..e7959f36b2 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -95,8 +95,7 @@ class MutableSettings(val errorFn: String => Unit)
*/
def copy(): Settings = {
val s = new Settings()
- val xs = userSetSettings flatMap (_.unparse)
- s.processArguments(xs.toList, true)
+ s.processArguments(recreateArgs, true)
s
}
@@ -534,7 +533,7 @@ class MutableSettings(val errorFn: String => Unit)
Some(rest)
}
override def tryToSetColon(args: List[String]) = tryToSet(args)
- override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(" +").toList)
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList)
def unparse: List[String] = value map { name + ":" + _ }
withHelpSyntax(name + ":<" + arg + ">")
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index d1ce460eb9..e949cb3eb2 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -167,8 +167,7 @@ trait ScalaSettings extends AbsScalaSettings
val Yreifycopypaste =
BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
val Yreifydebug = BooleanSetting ("-Yreify-debug", "Trace reification.")
- val Yreifytyperdebug
- = BooleanSetting ("-Yreifytyper-debug", "Trace typings of reified trees.")
+ val Ymacrodebug = BooleanSetting ("-Ymacro-debug", "Trace macro-related activities: generation of synthetics, expansion, exceptions.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Yrepldebug = BooleanSetting ("-Yrepl-debug", "Trace all repl activity.") .
withPostSetHook(_ => interpreter.replProps.debug setValue true)
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index a47bfda8c1..fb85ebeeb0 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -9,4 +9,4 @@ package symtab
import ast.{Trees, TreePrinters, DocComments}
import util._
-abstract class SymbolTable extends reflect.internal.SymbolTable
\ No newline at end of file
+abstract class SymbolTable extends reflect.internal.SymbolTable
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index d8db62a408..1cd4ab21ea 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -368,7 +368,7 @@ abstract class ClassfileParser {
case arr: Type => Constant(arr)
}
}
-
+
private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
val decodedLength = ByteCodecs.decode(bytes)
val arr = new Array[Byte](decodedLength)
@@ -424,19 +424,21 @@ abstract class ClassfileParser {
def forceMangledName(name: Name, module: Boolean): Symbol = {
val parts = name.decode.toString.split(Array('.', '$'))
var sym: Symbol = definitions.RootClass
- atPhase(currentRun.flattenPhase.prev) {
+
+ // was "at flatten.prev"
+ beforeFlatten {
for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
- val sym1 = atPhase(currentRun.icodePhase) {
+ val sym1 = beforeIcode {
sym.linkedClassOfClass.info
sym.info.decl(part.encode)
}//.suchThat(module == _.isModule)
- if (sym1 == NoSymbol)
- sym = sym.info.decl(part.encode.toTypeName)
- else
- sym = sym1
+
+ sym = (
+ if (sym1 ne NoSymbol) sym1
+ else sym.info.decl(part.encode.toTypeName)
+ )
}
}
-// println("found: " + sym)
sym
}
@@ -719,7 +721,7 @@ abstract class ClassfileParser {
index += 1
val bounds = variance match {
case '+' => TypeBounds.upper(objToAny(sig2type(tparams, skiptvs)))
- case '-' =>
+ case '-' =>
val tp = sig2type(tparams, skiptvs)
// sig2type seems to return AnyClass regardless of the situation:
// we don't want Any as a LOWER bound.
@@ -1205,11 +1207,11 @@ abstract class ClassfileParser {
// if loading during initialization of `definitions`, typerPhase is not yet set.
// in that case we simply load the member at the current phase
if (currentRun.typerPhase != null)
- atPhase(currentRun.typerPhase)(getMember(sym, innerName.toTypeName))
+ beforeTyper(getMember(sym, innerName.toTypeName))
else
getMember(sym, innerName.toTypeName)
- assert(s ne NoSymbol,
+ assert(s ne NoSymbol,
"" + ((externalName, outerName, innerName, sym.fullLocationString)) + " / " +
" while parsing " + ((in.file, busy)) +
sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members
@@ -1269,13 +1271,13 @@ abstract class ClassfileParser {
if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
// See ticket #1687 for an example of when topLevelClass is NoSymbol: it
// apparently occurs when processing v45.3 bytecode.
- if (sym.toplevelClass != NoSymbol)
- sym.privateWithin = sym.toplevelClass.owner
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
// protected in java means package protected. #3946
if ((jflags & JAVA_ACC_PROTECTED) != 0)
- if (sym.toplevelClass != NoSymbol)
- sym.privateWithin = sym.toplevelClass.owner
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
}
@inline private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 7d42dabc08..68af518d3a 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -179,7 +179,7 @@ abstract class ICodeReader extends ClassfileParser {
}
else {
forceMangledName(name, false)
- atPhase(currentRun.flattenPhase.next)(definitions.getClass(name))
+ afterFlatten(definitions.getClass(name.toTypeName))
}
if (sym.isModule)
sym.moduleClass
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 25ae6f33d2..758f870d6b 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -77,7 +77,7 @@ abstract class Pickler extends SubComponent {
private var entries = new Array[AnyRef](256)
private var ep = 0
private val index = new LinkedHashMap[AnyRef, Int]
- private lazy val nonClassRoot = root.ownersIterator.find(! _.isClass) getOrElse NoSymbol
+ private lazy val nonClassRoot = findOrElse(root.ownersIterator)(!_.isClass)(NoSymbol)
private def isRootSym(sym: Symbol) =
sym.name.toTermName == rootName && sym.owner == rootOwner
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index e7759f1d7e..71d595c9c4 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -82,7 +82,9 @@ abstract class AddInterfaces extends InfoTransform {
implClassMap.getOrElse(iface, {
atPhase(implClassPhase) {
- log("%s.implClass == %s".format(iface, iface.implClass))
+ if (iface.implClass ne NoSymbol)
+ log("%s.implClass == %s".format(iface, iface.implClass))
+
val implName = nme.implClassName(iface.name)
var impl = if (iface.owner.isClass) iface.owner.info.decl(implName) else NoSymbol
@@ -94,7 +96,6 @@ abstract class AddInterfaces extends InfoTransform {
// error: java.lang.AssertionError: assertion failed: (scala.tools.nsc.typechecker.Contexts$NoContext$,scala.tools.nsc.typechecker.Contexts,NoContext$,trait Contexts in package typechecker) / while parsing (/scala/trunk/build/pack/lib/scala-compiler.jar(scala/tools/nsc/interactive/ContextTrees$class.class),Some(class ContextTrees$class))trait Contexts.NoContext$ linkedModule: <none>List()
val originalImpl = impl
- val originalImplString = originalImpl.hasFlagsToString(-1L)
if (impl != NoSymbol) {
// Unlink a pre-existing symbol only if the implementation class is
// visible on the compilation classpath. In general this is true under
@@ -120,8 +121,8 @@ abstract class AddInterfaces extends InfoTransform {
impl setInfo new LazyImplClassType(iface)
implClassMap(iface) = impl
debuglog(
- "generating impl class " + impl + " " + impl.hasFlagsToString(-1L) + " in " + iface.owner + (
- if (originalImpl == NoSymbol) "" else " (cloned from " + originalImpl.fullLocationString + " " + originalImplString + ")"
+ "generating impl class " + impl.debugLocationString + " in " + iface.owner + (
+ if (originalImpl == NoSymbol) "" else " (cloned from " + originalImpl.debugLocationString + ")"
)
)
impl
@@ -194,7 +195,7 @@ abstract class AddInterfaces extends InfoTransform {
case PolyType(_, restpe) =>
implType(restpe)
}
- sym setInfo implType(atPhase(currentRun.erasurePhase)(iface.info))
+ sym setInfo implType(beforeErasure(iface.info))
}
override def load(clazz: Symbol) { complete(clazz) }
@@ -327,13 +328,11 @@ abstract class AddInterfaces extends InfoTransform {
override def transform(tree: Tree): Tree = {
val sym = tree.symbol
val tree1 = tree match {
- case ClassDef(mods, name, tparams, impl) if (sym.needsImplClass) =>
+ case ClassDef(mods, _, _, impl) if sym.needsImplClass =>
implClass(sym).initialize // to force lateDEFERRED flags
- treeCopy.ClassDef(tree, mods | INTERFACE, name, tparams, ifaceTemplate(impl))
- case DefDef(mods, name, tparams, vparamss, tpt, rhs)
- if (sym.isClassConstructor && sym.isPrimaryConstructor && sym.owner != ArrayClass) =>
- treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt,
- addMixinConstructorCalls(rhs, sym.owner)) // (3)
+ copyClassDef(tree)(mods = mods | INTERFACE, impl = ifaceTemplate(impl))
+ case DefDef(_,_,_,_,_,_) if sym.isClassConstructor && sym.isPrimaryConstructor && sym.owner != ArrayClass =>
+ deriveDefDef(tree)(addMixinConstructorCalls(_, sym.owner)) // (3)
case Template(parents, self, body) =>
val parents1 = sym.owner.info.parents map (t => TypeTree(t) setPos tree.pos)
treeCopy.Template(tree, parents1, emptyValDef, body)
@@ -350,7 +349,7 @@ abstract class AddInterfaces extends InfoTransform {
val mix1 = mix
if (mix == tpnme.EMPTY) mix
else {
- val ps = atPhase(currentRun.erasurePhase) {
+ val ps = beforeErasure {
sym.info.parents dropWhile (p => p.symbol.name != mix)
}
assert(!ps.isEmpty, tree);
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 50e6139e65..d04c6115ca 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -33,21 +33,21 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
private def savingStatics[T](body: => T): T = {
val savedNewStaticMembers : mutable.Buffer[Tree] = newStaticMembers.clone()
val savedNewStaticInits : mutable.Buffer[Tree] = newStaticInits.clone()
- val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
+ val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
val result = body
clearStatics()
newStaticMembers ++= savedNewStaticMembers
newStaticInits ++= savedNewStaticInits
symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
-
+
result
}
private def transformTemplate(tree: Tree) = {
val Template(parents, self, body) = tree
clearStatics()
val newBody = transformTrees(body)
- val templ = treeCopy.Template(tree, parents, self, transformTrees(newStaticMembers.toList) ::: newBody)
+ val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
try addStaticInits(templ) // postprocess to include static ctors
finally clearStatics()
}
@@ -85,6 +85,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case "poly-cache" => POLY_CACHE
}
+ def shouldRewriteTry(tree: Try) = {
+ val sym = tree.tpe.typeSymbol
+ forMSIL && (sym != UnitClass) && (sym != NothingClass)
+ }
+
private def typedWithPos(pos: Position)(tree: Tree) =
localTyper.typedPos(pos)(tree)
@@ -97,7 +102,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/** The boxed type if it's a primitive; identity otherwise.
*/
def toBoxedType(tp: Type) = if (isJavaValueType(tp)) boxedClass(tp.typeSymbol).tpe else tp
-
+
override def transform(tree: Tree): Tree = tree match {
/* Transforms dynamic calls (i.e. calls to methods that are undefined
@@ -134,7 +139,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case ad@ApplyDynamic(qual0, params) =>
if (settings.logReflectiveCalls.value)
unit.echo(ad.pos, "method invocation uses reflection")
-
+
val typedPos = typedWithPos(ad.pos) _
assert(ad.symbol.isPublic)
@@ -146,7 +151,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val flags = PRIVATE | STATIC | SYNTHETIC | (
if (isFinal) FINAL else 0
)
-
+
val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags) setInfoAndEnter forType
if (!isFinal)
varSym.addAnnotation(VolatileAttr)
@@ -488,7 +493,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val t: Tree = ad.symbol.tpe match {
case MethodType(mparams, resType) =>
assert(params.length == mparams.length, mparams)
-
+
typedPos {
val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
qual = safeREF(sym)
@@ -560,8 +565,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* Hence, we rewrite all try blocks with a result != {Unit, All} such that they
* store their result in a local variable. The catch blocks are adjusted as well.
* The try tree is substituted by a block whose result expression reads that variable. */
- case theTry @ Try(block, catches, finalizer)
- if theTry.tpe.typeSymbol != definitions.UnitClass && theTry.tpe.typeSymbol != definitions.NothingClass =>
+ case theTry @ Try(block, catches, finalizer) if shouldRewriteTry(theTry) =>
val tpe = theTry.tpe.widen
val tempVar = currentOwner.newVariable(mkTerm(nme.EXCEPTION_RESULT_PREFIX), theTry.pos).setInfo(tpe)
def assignBlock(rhs: Tree) = super.transform(BLOCK(Ident(tempVar) === transform(rhs)))
@@ -669,9 +673,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val newCtor = findStaticCtor(template) match {
// in case there already were static ctors - augment existing ones
// currently, however, static ctors aren't being generated anywhere else
- case Some(ctor @ DefDef(mods, name, tparams, vparamss, tpt, rhs)) =>
+ case Some(ctor @ DefDef(_,_,_,_,_,_)) =>
// modify existing static ctor
- val newBlock = rhs match {
+ deriveDefDef(ctor) {
case block @ Block(stats, expr) =>
// need to add inits to existing block
treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
@@ -679,15 +683,14 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// need to create a new block with inits and the old term
treeCopy.Block(term, newStaticInits.toList, term)
}
- treeCopy.DefDef(ctor, mods, name, tparams, vparamss, tpt, newBlock)
case None =>
// create new static ctor
val staticCtorSym = currentClass.newStaticConstructor(template.pos)
- val rhs = Block(newStaticInits.toList, Literal(Constant()))
+ val rhs = Block(newStaticInits.toList, Literal(Constant(())))
localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
}
- treeCopy.Template(template, template.parents, template.self, newCtor :: template.body)
+ deriveTemplate(template)(newCtor :: _)
}
}
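The `shouldRewriteTry` guard and the `Try` case above implement the rewrite described in the comment: on MSIL, a try whose result type is neither Unit nor Nothing is turned into a block that stores its result in a temporary variable. A hedged source-level sketch of the shape of that transformation (ordinary code, not trees):

    object TryRewriteDemo {
      def before(): Int =
        try 1 catch { case _: Exception => 2 }

      // After the rewrite: the result flows through a local variable and
      // the block's result expression reads that variable.
      def after(): Int = {
        var exceptionResult = 0
        try exceptionResult = 1
        catch { case _: Exception => exceptionResult = 2 }
        exceptionResult
      }

      def main(args: Array[String]): Unit =
        println(before() == after()) // true
    }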
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index b60b411f47..445b21c7ad 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -24,8 +24,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
protected def newTransformer(unit: CompilationUnit): Transformer =
new ConstructorTransformer(unit)
- private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = new mutable.HashMap[Symbol, List[Tree]]
- private val ctorParams: mutable.Map[Symbol, List[Symbol]] = new mutable.HashMap[Symbol, List[Symbol]]
+ private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]
+ private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]
class ConstructorTransformer(unit: CompilationUnit) extends Transformer {
@@ -129,7 +129,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
if (from.name != nme.OUTER) result
else localTyper.typedPos(to.pos) {
- IF (from OBJ_EQ NULL) THEN THROW(NullPointerExceptionClass) ELSE result
+ IF (from OBJ_EQ NULL) THEN Throw(NullPointerExceptionClass.tpe) ELSE result
}
}
@@ -167,20 +167,18 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
for (stat <- stats) stat match {
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case DefDef(_,_,_,_,_,rhs) =>
// methods with constant result type get literals as their body
// all methods except the primary constructor go into template
stat.symbol.tpe match {
case MethodType(List(), tp @ ConstantType(c)) =>
- defBuf += treeCopy.DefDef(
- stat, mods, name, tparams, vparamss, tpt,
- Literal(c) setPos rhs.pos setType tp)
+ defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
case _ =>
if (stat.symbol.isPrimaryConstructor) ()
else if (stat.symbol.isConstructor) auxConstructorBuf += stat
else defBuf += stat
}
- case ValDef(mods, name, tpt, rhs) =>
+ case ValDef(_, _, _, rhs) =>
// val defs with constant right-hand sides are eliminated.
// for all other val defs, an empty valdef goes into the template and
// the initializer goes as an assignment into the constructor
@@ -193,7 +191,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
(if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
stat.symbol, rhs1)
}
- defBuf += treeCopy.ValDef(stat, mods, name, tpt, EmptyTree)
+ defBuf += deriveValDef(stat)(_ => EmptyTree)
}
case ClassDef(_, _, _, _) =>
// classes are treated recursively, and left in the template
@@ -231,11 +229,11 @@ abstract class Constructors extends Transform with ast.TreeDSL {
tree match {
case DefDef(_, _, _, _, _, body)
if (tree.symbol.isOuterAccessor && tree.symbol.owner == clazz && clazz.isEffectivelyFinal) =>
- log("outerAccessors += " + tree.symbol.fullName)
+ debuglog("outerAccessors += " + tree.symbol.fullName)
outerAccessors ::= ((tree.symbol, body))
case Select(_, _) =>
if (!mustbeKept(tree.symbol)) {
- log("accessedSyms += " + tree.symbol.fullName)
+ debuglog("accessedSyms += " + tree.symbol.fullName)
accessedSyms addEntry tree.symbol
}
super.traverse(tree)
@@ -519,14 +517,9 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
}
- def delayedInitCall(closure: Tree) =
- localTyper.typed {
- atPos(impl.pos) {
- Apply(
- Select(This(clazz), delayedInitMethod),
- List(New(TypeTree(closure.symbol.tpe), List(List(This(clazz))))))
- }
- }
+ def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
+ gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
+ }
/** Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
def splitAtSuper(stats: List[Tree]) = {
@@ -555,13 +548,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
// Assemble final constructor
- defBuf += treeCopy.DefDef(
- constr, constr.mods, constr.name, constr.tparams, constr.vparamss, constr.tpt,
+ defBuf += deriveDefDef(constr)(_ =>
treeCopy.Block(
constrBody,
paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
guardSpecializedInitializer(remainingConstrStats),
- constrBody.expr));
+ constrBody.expr))
// Followed by any auxiliary constructors
defBuf ++= auxConstructorBuf
@@ -571,14 +563,13 @@ abstract class Constructors extends Transform with ast.TreeDSL {
clazz.info.decls unlink sym
// Eliminate all field definitions that can be dropped from template
- treeCopy.Template(impl, impl.parents, impl.self,
- defBuf.toList filter (stat => mustbeKept(stat.symbol)))
+ deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustbeKept(stat.symbol)))
} // transformClassTemplate
override def transform(tree: Tree): Tree =
tree match {
- case ClassDef(mods, name, tparams, impl) if !tree.symbol.isInterface && !isValueClass(tree.symbol) =>
- treeCopy.ClassDef(tree, mods, name, tparams, transformClassTemplate(impl))
+ case ClassDef(_,_,_,_) if !tree.symbol.isInterface && !isValueClass(tree.symbol) =>
+ deriveClassDef(tree)(transformClassTemplate)
case _ =>
super.transform(tree)
}
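Above, `guardedCtorStats` and `ctorParams` move from ad-hoc HashMaps to `perRunCaches.newMap`, so they can be cleared between compiler runs instead of retaining stale symbols. A hedged standalone sketch of the idea (not the actual perRunCaches implementation):

    object PerRunCachesDemo {
      import scala.collection.mutable

      private val caches = mutable.ListBuffer[mutable.Map[_, _]]()

      def newMap[K, V](): mutable.Map[K, V] = {
        val m = mutable.HashMap[K, V]()
        caches += m // registered so it can be cleared between runs
        m
      }
      def clearAll(): Unit = caches foreach (_.clear())

      def main(args: Array[String]): Unit = {
        val ctorParams = newMap[String, List[String]]()
        ctorParams("C") = List("x", "y")
        clearAll() // as at the start of a new run
        println(ctorParams.isEmpty) // true
      }
    }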
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index efbfe4da41..70364070ff 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -31,6 +31,33 @@ abstract class Erasure extends AddInterfaces
// -------- erasure on types --------------------------------------------------------
+ // A type function from T => Class[U], used to determine the return
+ // type of getClass calls. The returned type is:
+ //
+ // 1. If T is a value type, Class[T].
+ // 2. If T is a phantom type (Any or AnyVal), Class[_].
+ // 3. If T is a local class, Class[_ <: |T|].
+ // 4. Otherwise, Class[_ <: T].
+ //
+ // Note: AnyVal cannot be Class[_ <: AnyVal] because if the static type of the
+ // receiver is AnyVal, it implies the receiver is boxed, so the correct
+ // class object is that of java.lang.Integer, not Int.
+ //
+ // TODO: If T is final, return type could be Class[T]. Should it?
+ def getClassReturnType(tpe: Type): Type = {
+ if (phase.erasedTypes) ClassClass.tpe else {
+ val tp = tpe.widen.normalize
+ val sym = tp.typeSymbol
+
+ if (isValueClass(sym)) ClassType(tp)
+ else boundedClassType(
+ if (isPhantomClass(sym)) ObjectClass.tpe
+ else if (sym.isLocalClass) intersectionDominator(tp.parents)
+ else tp
+ )
+ }
+ }
+
// convert a numeric with a toXXX method
def numericConversion(tree: Tree, numericSym: Symbol): Tree = {
val mname = newTermName("to" + numericSym.name)
@@ -196,7 +223,7 @@ abstract class Erasure extends AddInterfaces
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
- def javaSig(sym0: Symbol, info: Type): Option[String] = atPhase(currentRun.erasurePhase) {
+ def javaSig(sym0: Symbol, info: Type): Option[String] = beforeErasure {
val isTraitSignature = sym0.enclClass.isTrait
def superSig(parents: List[Type]) = traceSig("superSig", parents) {
@@ -230,7 +257,7 @@ abstract class Erasure extends AddInterfaces
// Anything which could conceivably be a module (i.e. isn't known to be
// a type parameter or similar) must go through here or the signature is
// likely to end up with Foo<T>.Empty where it needs Foo<T>.Empty$.
- def fullNameInSig(sym: Symbol) = "L" + atPhase(currentRun.icodePhase)(sym.javaBinaryName)
+ def fullNameInSig(sym: Symbol) = "L" + beforeIcode(sym.javaBinaryName.toString)
def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): String = {
val tp = tp0.dealias
@@ -394,9 +421,9 @@ abstract class Erasure extends AddInterfaces
/** Box `tree` of unboxed type */
private def box(tree: Tree): Tree = tree match {
- case LabelDef(name, params, rhs) =>
- val rhs1 = box(rhs)
- treeCopy.LabelDef(tree, name, params, rhs1) setType rhs1.tpe
+ case LabelDef(_, _, _) =>
+ val ldef = deriveLabelDef(tree)(box)
+ ldef setType ldef.rhs.tpe
case _ =>
typedPos(tree.pos)(tree.tpe.typeSymbol match {
case UnitClass =>
@@ -413,7 +440,7 @@ abstract class Erasure extends AddInterfaces
* fields (see TupleX). (ID)
*/
case Apply(boxFun, List(arg)) if isUnbox(tree.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
- log("boxing an unbox: " + tree + " and replying with " + arg)
+ log("boxing an unbox: " + tree + "/" + tree.symbol + " and replying with " + arg + " of type " + arg.tpe)
arg
case _ =>
(REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
@@ -433,9 +460,9 @@ abstract class Erasure extends AddInterfaces
println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
adaptToType(unboxed, pt)
*/
- case LabelDef(name, params, rhs) =>
- val rhs1 = unbox(rhs, pt)
- treeCopy.LabelDef(tree, name, params, rhs1) setType rhs1.tpe
+ case LabelDef(_, _, _) =>
+ val ldef = deriveLabelDef(tree)(unbox(_, pt))
+ ldef setType ldef.rhs.tpe
case _ =>
typedPos(tree.pos)(pt.typeSymbol match {
case UnitClass =>
@@ -577,8 +604,8 @@ abstract class Erasure extends AddInterfaces
throw ex
}
def adaptCase(cdef: CaseDef): CaseDef = {
- val body1 = adaptToType(cdef.body, tree1.tpe)
- treeCopy.CaseDef(cdef, cdef.pat, cdef.guard, body1) setType body1.tpe
+ val newCdef = deriveCaseDef(cdef)(adaptToType(_, tree1.tpe))
+ newCdef setType newCdef.body.tpe
}
def adaptBranch(branch: Tree): Tree =
if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe);
@@ -621,21 +648,20 @@ abstract class Erasure extends AddInterfaces
private def checkNoDoubleDefs(root: Symbol) {
def doubleDefError(sym1: Symbol, sym2: Symbol) {
// the .toString must also be computed at the earlier phase
- def atRefc[T](op: => T) = atPhase[T](currentRun.refchecksPhase.next)(op)
- val tpe1 = atRefc(root.thisType.memberType(sym1))
- val tpe2 = atRefc(root.thisType.memberType(sym2))
+ val tpe1 = afterRefchecks(root.thisType.memberType(sym1))
+ val tpe2 = afterRefchecks(root.thisType.memberType(sym2))
if (!tpe1.isErroneous && !tpe2.isErroneous)
unit.error(
if (sym1.owner == root) sym1.pos else root.pos,
(if (sym1.owner == sym2.owner) "double definition:\n"
else if (sym1.owner == root) "name clash between defined and inherited member:\n"
else "name clash between inherited members:\n") +
- sym1 + ":" + atRefc(tpe1.toString) +
+ sym1 + ":" + afterRefchecks(tpe1.toString) +
(if (sym1.owner == root) "" else sym1.locationString) + " and\n" +
- sym2 + ":" + atRefc(tpe2.toString) +
+ sym2 + ":" + afterRefchecks(tpe2.toString) +
(if (sym2.owner == root) " at line " + (sym2.pos).line else sym2.locationString) +
"\nhave same type" +
- (if (atRefc(tpe1 =:= tpe2)) "" else " after erasure: " + atPhase(phase.next)(sym1.tpe)))
+ (if (afterRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + afterErasure(sym1.tpe)))
sym1.setInfo(ErrorType)
}
@@ -645,7 +671,7 @@ abstract class Erasure extends AddInterfaces
if (e.sym.isTerm) {
var e1 = decls.lookupNextEntry(e)
while (e1 ne null) {
- if (atPhase(phase.next)(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym)
+ if (afterErasure(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym)
e1 = decls.lookupNextEntry(e1)
}
}
@@ -659,10 +685,10 @@ abstract class Erasure extends AddInterfaces
|| !sym.hasTypeAt(currentRun.refchecksPhase.id))
override def matches(sym1: Symbol, sym2: Symbol): Boolean =
- atPhase(phase.next)(sym1.tpe =:= sym2.tpe)
+ afterErasure(sym1.tpe =:= sym2.tpe)
}
while (opc.hasNext) {
- if (!atPhase(currentRun.refchecksPhase.next)(
+ if (!afterRefchecks(
root.thisType.memberType(opc.overriding) matches
root.thisType.memberType(opc.overridden))) {
debuglog("" + opc.overriding.locationString + " " +
@@ -681,8 +707,8 @@ abstract class Erasure extends AddInterfaces
for (member <- root.info.nonPrivateMember(other.name).alternatives) {
if (member != other &&
!(member hasFlag BRIDGE) &&
- atPhase(phase.next)(member.tpe =:= other.tpe) &&
- !atPhase(refchecksPhase.next)(
+ afterErasure(member.tpe =:= other.tpe) &&
+ !afterRefchecks(
root.thisType.memberType(member) matches root.thisType.memberType(other))) {
debuglog("" + member.locationString + " " + member.infosString + other.locationString + " " + other.infosString);
doubleDefError(member, other)
@@ -706,13 +732,13 @@ abstract class Erasure extends AddInterfaces
*/
private def bridgeDefs(owner: Symbol): (List[Tree], immutable.Set[Symbol]) = {
var toBeRemoved: immutable.Set[Symbol] = immutable.Set()
- //println("computing bridges for " + owner)//DEBUG
- assert(phase == currentRun.erasurePhase)
+ debuglog("computing bridges for " + owner)//DEBUG
+ assert(phase == currentRun.erasurePhase, phase)
val site = owner.thisType
val bridgesScope = newScope
val bridgeTarget = new mutable.HashMap[Symbol, Symbol]
var bridges: List[Tree] = List()
- val opc = atPhase(currentRun.explicitouterPhase) {
+ val opc = beforeExplicitOuter {
new overridingPairs.Cursor(owner) {
override def parents: List[Type] = List(owner.info.firstParent)
override def exclude(sym: Symbol): Boolean =
@@ -723,9 +749,9 @@ abstract class Erasure extends AddInterfaces
val member = opc.overriding
val other = opc.overridden
//println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG
- if (atPhase(currentRun.explicitouterPhase)(!member.isDeferred)) {
+ if (beforeExplicitOuter(!member.isDeferred)) {
val otpe = erasure(owner, other.tpe)
- val bridgeNeeded = atPhase(phase.next) (
+ val bridgeNeeded = afterErasure (
!(other.tpe =:= member.tpe) &&
!(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
{ var e = bridgesScope.lookupEntry(member.name)
@@ -740,15 +766,15 @@ abstract class Erasure extends AddInterfaces
// the parameter symbols need to have the new owner
bridge.setInfo(otpe.cloneInfo(bridge))
bridgeTarget(bridge) = member
- atPhase(phase.next) { owner.info.decls.enter(bridge) }
+ afterErasure { owner.info.decls.enter(bridge) }
if (other.owner == owner) {
//println("bridge to same: "+other+other.locationString)//DEBUG
- atPhase(phase.next) { owner.info.decls.unlink(other) }
+ afterErasure { owner.info.decls.unlink(other) }
toBeRemoved += other
}
bridgesScope enter bridge
bridges =
- atPhase(phase.next) {
+ afterErasure {
atPos(bridge.pos) {
val bridgeDef =
DefDef(bridge,
@@ -762,7 +788,7 @@ abstract class Erasure extends AddInterfaces
if ( member.isSynthetic // TODO: should we do this for user-defined unapplies as well?
&& ((member.name == nme.unapply) || (member.name == nme.unapplySeq))
// && (bridge.paramss.nonEmpty && bridge.paramss.head.nonEmpty && bridge.paramss.head.tail.isEmpty) // does the first argument list have exactly one argument -- for user-defined unapplies we can't be sure
- && !(atPhase(phase.next)(member.tpe <:< other.tpe))) { // no static guarantees (TODO: is the subtype test ever true?)
+ && !(afterErasure(member.tpe <:< other.tpe))) { // no static guarantees (TODO: is the subtype test ever true?)
import CODE._
val typeTest = gen.mkIsInstanceOf(REF(bridge.firstParam), member.tpe.params.head.tpe, any = true, wrapInApply = true) // any = true since we're before erasure (?), wrapInApply is true since we're after uncurry
// println("unapp type test: "+ typeTest)
@@ -819,11 +845,11 @@ abstract class Erasure extends AddInterfaces
*/
private val preTransformer = new TypingTransformer(unit) {
def preErase(tree: Tree): Tree = tree match {
- case ClassDef(mods, name, tparams, impl) =>
+ case ClassDef(_,_,_,_) =>
debuglog("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
- treeCopy.ClassDef(tree, mods, name, List(), impl)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- treeCopy.DefDef(tree, mods, name, List(), vparamss, tpt, rhs)
+ copyClassDef(tree)(tparams = Nil)
+ case DefDef(_,_,_,_,_,_) =>
+ copyDefDef(tree)(tparams = Nil)
case TypeDef(_, _, _, _) =>
EmptyTree
case Apply(instanceOf @ TypeApply(fun @ Select(qual, name), args @ List(arg)), List()) // !!! todo: simplify by having GenericArray also extract trees
@@ -1025,7 +1051,7 @@ abstract class Erasure extends AddInterfaces
*/
override def transform(tree: Tree): Tree = {
val tree1 = preTransformer.transform(tree)
- atPhase(phase.next) {
+ afterErasure {
val tree2 = mixinTransformer.transform(tree1)
debuglog("tree after addinterfaces: \n" + tree2)
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 7f7f7e7b65..595c1486b6 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -68,7 +68,7 @@ abstract class ExplicitOuter extends InfoTransform
result
}
-
+
private val innerClassConstructorParamName: TermName = newTermName("arg" + nme.OUTER)
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
@@ -89,13 +89,13 @@ abstract class ExplicitOuter extends InfoTransform
def outerAccessor(clazz: Symbol): Symbol = {
val firstTry = clazz.info.decl(nme.expandedName(nme.OUTER, clazz))
if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry
- else clazz.info.decls find (_.outerSource == clazz) getOrElse NoSymbol
- }
+ else findOrElse(clazz.info.decls)(_.outerSource == clazz)(NoSymbol)
+ }
def newOuterAccessor(clazz: Symbol) = {
val accFlags = SYNTHETIC | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
val sym = clazz.newMethodSymbol(nme.OUTER, clazz.pos, accFlags)
val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
-
+
sym expandName clazz
sym.referenced = clazz
sym setInfo MethodType(Nil, restpe)
@@ -163,14 +163,14 @@ abstract class ExplicitOuter extends InfoTransform
decls1 = decls.cloneScope
val outerAcc = clazz.newMethod(nme.OUTER, clazz.pos) // 3
outerAcc expandName clazz
-
+
decls1 enter newOuterAccessor(clazz)
if (hasOuterField(clazz)) //2
decls1 enter newOuterField(clazz)
}
if (!clazz.isTrait && !parents.isEmpty) {
for (mc <- clazz.mixinClasses) {
- val mixinOuterAcc: Symbol = atPhase(phase.next)(outerAccessor(mc))
+ val mixinOuterAcc: Symbol = afterExplicitOuter(outerAccessor(mc))
if (mixinOuterAcc != NoSymbol) {
if (decls1 eq decls) decls1 = decls.cloneScope
val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED)
@@ -468,10 +468,12 @@ abstract class ExplicitOuter extends InfoTransform
}
}
super.transform(
- treeCopy.Template(tree, parents, self,
- if (newDefs.isEmpty) decls else decls ::: newDefs.toList)
+ deriveTemplate(tree)(decls =>
+ if (newDefs.isEmpty) decls
+ else decls ::: newDefs.toList
+ )
)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case DefDef(_, _, _, vparamss, _, rhs) =>
if (sym.isClassConstructor) {
rhs match {
case Literal(_) =>
@@ -484,7 +486,7 @@ abstract class ExplicitOuter extends InfoTransform
sym.newValueParameter(nme.OUTER, sym.pos) setInfo outerField(clazz).info
((ValDef(outerParam) setType NoType) :: vparamss.head) :: vparamss.tail
} else vparamss
- super.transform(treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, rhs))
+ super.transform(copyDefDef(tree)(vparamss = vparamss1))
}
} else
super.transform(tree)
@@ -517,7 +519,7 @@ abstract class ExplicitOuter extends InfoTransform
super.transform(treeCopy.Apply(tree, sel, outerVal :: args))
// entry point for pattern matcher translation
- case mch: Match =>
+ case mch: Match if (!opt.virtPatmat) => // don't use old pattern matcher as fallback when the user wants the virtualizing one
matchTranslation(mch)
case _ =>
@@ -559,7 +561,7 @@ abstract class ExplicitOuter extends InfoTransform
/** The transformation method for whole compilation units */
override def transformUnit(unit: CompilationUnit) {
- atPhase(phase.next)(super.transformUnit(unit))
+ afterExplicitOuter(super.transformUnit(unit))
}
}
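The `outerAccessor` rewrite above (like the `nonClassRoot` change in Pickler.scala earlier) adopts a `findOrElse` utility: the first element satisfying a predicate, with an inline default. A hedged sketch of its likely shape:

    object FindOrElseDemo {
      def findOrElse[T](xs: TraversableOnce[T])(p: T => Boolean)(orElse: => T): T =
        xs.toIterator find p getOrElse orElse

      def main(args: Array[String]): Unit = {
        println(findOrElse(List(1, 2, 3))(_ > 1)(-1)) // 2
        println(findOrElse(List(1, 2, 3))(_ > 9)(-1)) // -1
      }
    }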
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index 4fa5b52de3..8856024a30 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -20,16 +20,14 @@ abstract class Flatten extends InfoTransform {
/** Updates the owning scope with the given symbol; returns the old symbol.
*/
- private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = {
- atPhase(phase.next) {
- val scope = sym.owner.info.decls
- val old = scope lookup sym.name
- if (old ne NoSymbol)
- scope unlink old
+ private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = afterFlatten {
+ val scope = sym.owner.info.decls
+ val old = scope lookup sym.name
+ if (old ne NoSymbol)
+ scope unlink old
- scope enter sym
- old
- }
+ scope enter sym
+ old
}
private def liftClass(sym: Symbol) {
@@ -53,24 +51,26 @@ abstract class Flatten extends InfoTransform {
val clazz = pre.typeSymbol
clazz.isClass && !clazz.isPackageClass && {
// Cannot flatten here: class A[T] { object B }
- atPhase(currentRun.erasurePhase.prev)(clazz.typeParams.isEmpty)
+ // was "at erasurePhase.prev"
+ beforeErasure(clazz.typeParams.isEmpty)
}
}
private val flattened = new TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) if isFlattenablePrefix(pre) =>
- assert(args.isEmpty && sym.toplevelClass != NoSymbol, sym.ownerChain)
- typeRef(sym.toplevelClass.owner.thisType, sym, Nil)
+ assert(args.isEmpty && sym.enclosingTopLevelClass != NoSymbol, sym.ownerChain)
+ typeRef(sym.enclosingTopLevelClass.owner.thisType, sym, Nil)
case ClassInfoType(parents, decls, clazz) =>
var parents1 = parents
val decls1 = scopeTransform(clazz) {
val decls1 = newScope
if (clazz.isPackageClass) {
- atPhase(phase.next)(decls foreach (decls1 enter _))
- } else {
+ afterFlatten { decls foreach (decls1 enter _) }
+ }
+ else {
val oldowner = clazz.owner
- atPhase(phase.next)(oldowner.info)
+ afterFlatten { oldowner.info }
parents1 = parents mapConserve (this)
for (sym <- decls) {
@@ -102,7 +102,7 @@ abstract class Flatten extends InfoTransform {
class Flattener extends Transformer {
/** Buffers for lifted out classes */
- private val liftedDefs = new mutable.HashMap[Symbol, ListBuffer[Tree]]
+ private val liftedDefs = perRunCaches.newMap[Symbol, ListBuffer[Tree]]()
override def transform(tree: Tree): Tree = {
tree match {
@@ -119,14 +119,10 @@ abstract class Flatten extends InfoTransform {
val sym = tree.symbol
val tree1 = tree match {
case ClassDef(_, _, _, _) if sym.isNestedClass =>
- liftedDefs(sym.toplevelClass.owner) += tree
+ liftedDefs(sym.enclosingTopLevelClass.owner) += tree
EmptyTree
case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) =>
- atPhase(phase.next) {
- atPos(tree.pos) {
- gen.mkAttributedRef(sym)
- }
- }
+ afterFlatten(atPos(tree.pos)(gen.mkAttributedRef(sym)))
case _ =>
tree
}
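This file's diff, like several above, replaces raw `atPhase(currentRun.somePhase)` / `atPhase(phase.next)` calls with named helpers (`beforeErasure`, `afterFlatten`, `afterOwnPhase`, ...). Judging from the call sites, "before X" runs an operation at phase X and "after X" at the following phase, restoring the current phase afterwards. A simplified standalone model of that behavior (assumed shapes, not the compiler's SymbolTable API):

    object PhaseHelpersDemo {
      case class Phase(name: String, id: Int) {
        def next = Phase(name + ".next", id + 1)
      }
      var current = Phase("typer", 1)
      val erasurePhase = Phase("erasure", 10)

      def atPhase[T](ph: Phase)(op: => T): T = {
        val saved = current
        current = ph
        try op finally current = saved
      }
      def beforeErasure[T](op: => T): T = atPhase(erasurePhase)(op)
      def afterErasure[T](op: => T): T  = atPhase(erasurePhase.next)(op)

      def main(args: Array[String]): Unit = {
        beforeErasure(println("running at " + current.name)) // erasure
        afterErasure(println("running at " + current.name))  // erasure.next
        println("restored to " + current.name)               // typer
      }
    }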
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 712298bd89..570eaba3a9 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -18,7 +18,7 @@ abstract class LambdaLift extends InfoTransform {
/** the following two members override abstract members in Transform */
val phaseName: String = "lambdalift"
-
+
/** Converts types of captured variables to *Ref types.
*/
def boxIfCaptured(sym: Symbol, tpe: Type, erasedTypes: Boolean) =
@@ -65,17 +65,20 @@ abstract class LambdaLift extends InfoTransform {
/** The set of symbols that need to be renamed. */
private val renamable = newSymSet
- private val renamableImplClasses = mutable.HashMap[Name, Symbol]() withDefaultValue NoSymbol
+ // (trait, name) -> owner
+ private val localTraits = mutable.HashMap[(Symbol, Name), Symbol]()
+ // (owner, name) -> implClass
+ private val localImplClasses = mutable.HashMap[(Symbol, Name), Symbol]()
/** A flag to indicate whether new free variables have been found */
private var changedFreeVars: Boolean = _
/** Buffers for lifted out classes and methods */
private val liftedDefs = new LinkedHashMap[Symbol, List[Tree]]
-
+
/** True if we are transforming under a ReferenceToBoxed node */
private var isBoxedRef = false
-
+
private type SymSet = TreeSet[Symbol]
private def newSymSet = new TreeSet[Symbol](_ isLess _)
@@ -125,7 +128,7 @@ abstract class LambdaLift extends InfoTransform {
if (!ss(sym)) {
ss addEntry sym
renamable addEntry sym
- atPhase(currentRun.picklerPhase) {
+ beforePickler {
// The param symbol in the MethodType should not be renamed, only the symbol in scope. This way,
// parameter names for named arguments are not changed. Example: without cloning the MethodType,
// def closure(x: Int) = { () => x }
@@ -167,8 +170,13 @@ abstract class LambdaLift extends InfoTransform {
// arrangements, and then have separate methods which attempt to compensate
// for that failure. There should be exactly one method for any given
// entity which always gives the right answer.
- if (sym.isImplClass) renamableImplClasses(nme.interfaceName(sym.name)) = sym
- else renamable addEntry sym
+ if (sym.isImplClass)
+ localImplClasses((sym.owner, nme.interfaceName(sym.name))) = sym
+ else {
+ renamable addEntry sym
+ if (sym.isTrait)
+ localTraits((sym, sym.name)) = sym.owner
+ }
}
case DefDef(_, _, _, _, _, _) =>
if (sym.isLocal) {
@@ -213,7 +221,7 @@ abstract class LambdaLift extends InfoTransform {
for (caller <- called.keys ; callee <- called(caller) ; fvs <- free get callee ; fv <- fvs)
markFree(fv, caller)
} while (changedFreeVars)
-
+
def renameSym(sym: Symbol) {
val originalName = sym.name
val base = sym.name + nme.NAME_JOIN_STRING + (
@@ -241,13 +249,20 @@ abstract class LambdaLift extends InfoTransform {
for (sym <- renamable) {
// If we renamed a trait from Foo to Foo$1, we must rename the implementation
// class from Foo$class to Foo$1$class. (Without special consideration it would
- // become Foo$class$1 instead.)
- val implClass = if (sym.isTrait) renamableImplClasses(sym.name) else NoSymbol
- if ((implClass ne NoSymbol) && (sym.owner == implClass.owner)) renameTrait(sym, implClass)
- else renameSym(sym)
+ // become Foo$class$1 instead.) Since the symbols are being renamed out from
+ // under us, and there's no reliable link between trait symbol and impl symbol,
+ // we have maps from ((trait, name)) -> owner and ((owner, name)) -> impl.
+ localTraits remove ((sym, sym.name)) match {
+ case None => renameSym(sym)
+ case Some(owner) =>
+ localImplClasses remove ((owner, sym.name)) match {
+ case Some(implSym) => renameTrait(sym, implSym)
+ case _ => renameSym(sym) // pure interface, no impl class
+ }
+ }
}
- atPhase(phase.next) {
+ afterOwnPhase {
for ((owner, freeValues) <- free.toList) {
val newFlags = SYNTHETIC | ( if (owner.isClass) PARAMACCESSOR | PrivateLocal else PARAM )
debuglog("free var proxy: %s, %s".format(owner.fullLocationString, freeValues.toList.mkString(", ")))
@@ -305,12 +320,13 @@ abstract class LambdaLift extends InfoTransform {
case Some(ps) =>
val freeParams = ps map (p => ValDef(p) setPos tree.pos setType NoType)
tree match {
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case DefDef(_, _, _, vparams :: _, _, _) =>
val addParams = cloneSymbols(ps).map(_.setFlag(PARAM))
sym.updateInfo(
lifted(MethodType(sym.info.params ::: addParams, sym.info.resultType)))
- treeCopy.DefDef(tree, mods, name, tparams, List(vparamss.head ++ freeParams), tpt, rhs)
- case ClassDef(mods, name, tparams, impl @ Template(parents, self, body)) =>
+
+ copyDefDef(tree)(vparamss = List(vparams ++ freeParams))
+ case ClassDef(_, _, _, _) =>
// Disabled attempt to add getters to freeParams
// this does not work yet. Problem is that local symbols need local names
// and references to local symbols need to be transformed into
@@ -322,8 +338,7 @@ abstract class LambdaLift extends InfoTransform {
// DefDef(getter, rhs) setPos tree.pos setType NoType
// }
// val newDefs = if (sym.isTrait) freeParams ::: (ps map paramGetter) else freeParams
- treeCopy.ClassDef(tree, mods, name, tparams,
- treeCopy.Template(impl, parents, self, body ::: freeParams))
+ deriveClassDef(tree)(impl => deriveTemplate(impl)(_ ::: freeParams))
}
case None =>
tree
@@ -404,10 +419,10 @@ abstract class LambdaLift extends InfoTransform {
def refConstr(expr: Tree): Tree = expr match {
case Try(block, catches, finalizer) =>
Try(refConstr(block), catches map refConstrCase, finalizer)
- case _ =>
- Apply(Select(New(TypeTree(sym.tpe)), nme.CONSTRUCTOR), List(expr))
+ case _ =>
+ New(sym.tpe, expr)
}
- def refConstrCase(cdef: CaseDef): CaseDef =
+ def refConstrCase(cdef: CaseDef): CaseDef =
CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
refConstr(constructorArg)
@@ -452,7 +467,7 @@ abstract class LambdaLift extends InfoTransform {
tree
}
}
-
+
private def preTransform(tree: Tree) = super.transform(tree) setType lifted(tree.tpe)
override def transform(tree: Tree): Tree = tree match {
@@ -461,21 +476,20 @@ abstract class LambdaLift extends InfoTransform {
case _ =>
postTransform(preTransform(tree))
}
-
+
/** Transform statements and add lifted definitions to them. */
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
def addLifted(stat: Tree): Tree = stat match {
- case ClassDef(mods, name, tparams, impl @ Template(parents, self, body)) =>
+ case ClassDef(_, _, _, _) =>
val lifted = liftedDefs get stat.symbol match {
case Some(xs) => xs reverseMap addLifted
case _ => log("unexpectedly no lifted defs for " + stat.symbol) ; Nil
}
- val result = treeCopy.ClassDef(
- stat, mods, name, tparams, treeCopy.Template(impl, parents, self, body ::: lifted))
- liftedDefs -= stat.symbol
- result
- case DefDef(mods, name, tp, vp, tpt, Block(Nil, expr)) if !stat.symbol.isConstructor =>
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt, expr)
+ try deriveClassDef(stat)(impl => deriveTemplate(impl)(_ ::: lifted))
+ finally liftedDefs -= stat.symbol
+
+ case DefDef(_, _, _, _, _, Block(Nil, expr)) if !stat.symbol.isConstructor =>
+ deriveDefDef(stat)(_ => expr)
case _ =>
stat
}
@@ -484,7 +498,7 @@ abstract class LambdaLift extends InfoTransform {
override def transformUnit(unit: CompilationUnit) {
computeFreeVars
- atPhase(phase.next)(super.transformUnit(unit))
+ afterOwnPhase(super.transformUnit(unit))
assert(liftedDefs.isEmpty, liftedDefs.keys mkString ", ")
}
} // class LambdaLifter
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index f8c5f5bfc6..85ba539993 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -50,9 +50,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
*/
class LazyValues(unit: CompilationUnit) extends TypingTransformer(unit) {
/** map from method symbols to the number of lazy values it defines. */
- private val lazyVals = new mutable.HashMap[Symbol, Int] {
- override def default(meth: Symbol) = 0
- }
+ private val lazyVals = perRunCaches.newMap[Symbol, Int]() withDefaultValue 0
import symtab.Flags._
import lazyVals._
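// The withDefaultValue idiom above replaces subclassing HashMap to override
// `default`: reads of missing keys yield the default without inserting it.
//   val counts = perRunCaches.newMap[Symbol, Int]() withDefaultValue 0
//   counts(someSym)        // 0, and someSym is still not a key
//   counts(someSym) += 1   // read-modify-write is now safe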
@@ -70,7 +68,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
curTree = tree
tree match {
- case DefDef(mods, name, tparams, vparams, tpt, rhs) => atOwner(tree.symbol) {
+ case DefDef(_, _, _, _, _, rhs) => atOwner(tree.symbol) {
val res = if (!sym.owner.isClass && sym.isLazy) {
val enclosingClassOrDummyOrMethod = {
val enclMethod = sym.enclMethod
@@ -92,11 +90,10 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
} else
super.transform(rhs)
- treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
- if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
+ deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
}
- case Template(parents, self, body) => atOwner(currentOwner) {
+ case Template(_, _, body) => atOwner(currentOwner) {
val body1 = super.transformTrees(body)
var added = false
val stats =
@@ -108,8 +105,8 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
added = true
typed(addBitmapDefs(sym, stat))
} else stat
- case ValDef(mods, name, tpt, rhs) =>
- typed(treeCopy.ValDef(stat, mods, name, tpt, addBitmapDefs(stat.symbol, rhs)))
+ case ValDef(_, _, _, _) =>
+ typed(deriveValDef(stat)(addBitmapDefs(stat.symbol, _)))
case _ =>
stat
}
@@ -124,29 +121,29 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
})
toAdd0
} else List()
- treeCopy.Template(tree, parents, self, innerClassBitmaps ++ stats)
+ deriveTemplate(tree)(_ => innerClassBitmaps ++ stats)
}
- case ValDef(mods, name, tpt, rhs0) if (!sym.owner.isModule && !sym.owner.isClass) =>
- val rhs = super.transform(rhs0)
- treeCopy.ValDef(tree, mods, name, tpt,
- if (LocalLazyValFinder.find(rhs)) typed(addBitmapDefs(sym, rhs)) else rhs)
+ case ValDef(_, _, _, _) if !sym.owner.isModule && !sym.owner.isClass =>
+ deriveValDef(tree) { rhs0 =>
+ val rhs = super.transform(rhs0)
+ if (LocalLazyValFinder.find(rhs)) typed(addBitmapDefs(sym, rhs)) else rhs
+ }
case l@LabelDef(name0, params0, ifp0@If(_, _, _)) if name0.startsWith(nme.WHILE_PREFIX) =>
val ifp1 = super.transform(ifp0)
val If(cond0, thenp0, elsep0) = ifp1
+
if (LocalLazyValFinder.find(thenp0))
- treeCopy.LabelDef(l, name0, params0,
- treeCopy.If(ifp1, cond0, typed(addBitmapDefs(sym.owner, thenp0)), elsep0))
+ deriveLabelDef(l)(_ => treeCopy.If(ifp1, cond0, typed(addBitmapDefs(sym.owner, thenp0)), elsep0))
else
l
- case l@LabelDef(name0, params0, block@Block(stats0, _))
+ case l@LabelDef(name0, params0, block@Block(stats0, expr))
if name0.startsWith(nme.WHILE_PREFIX) || name0.startsWith(nme.DO_WHILE_PREFIX) =>
val stats1 = super.transformTrees(stats0)
if (LocalLazyValFinder.find(stats1))
- treeCopy.LabelDef(l, name0, params0,
- treeCopy.Block(block, typed(addBitmapDefs(sym.owner, stats1.head))::stats1.tail, block.expr))
+ deriveLabelDef(l)(_ => treeCopy.Block(block, typed(addBitmapDefs(sym.owner, stats1.head))::stats1.tail, expr))
else
l
@@ -171,9 +168,9 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
def isMatch(params: List[Ident]) = (params.tail corresponds methSym.tpe.params)(_.tpe == _.tpe)
if (bmps.isEmpty) rhs else rhs match {
- case Block(assign, l @ LabelDef(name, params, rhs1))
+ case Block(assign, l @ LabelDef(name, params, _))
if name.toString == ("_" + methSym.name) && isMatch(params) =>
- Block(assign, treeCopy.LabelDef(l, name, params, typed(prependStats(bmps, rhs1))))
+ Block(assign, deriveLabelDef(l)(rhs => typed(prependStats(bmps, rhs))))
case _ => prependStats(bmps, rhs)
}
@@ -233,9 +230,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
private def mkSetFlag(bmp: Symbol, mask: Tree, bmpRef: Tree): Tree =
bmpRef === (bmpRef INT_| mask)
- val bitmaps = new mutable.HashMap[Symbol, List[Symbol]] {
- override def default(meth: Symbol) = Nil
- }
+ val bitmaps = mutable.Map[Symbol, List[Symbol]]() withDefaultValue Nil
/** Return the symbol corresponding to the right bitmap int inside meth,
 * for the given offset.
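// Plain-Scala sketch of the bitmap idiom behind `bitmaps` and mkSetFlag:
// one Int per method holds up to 32 "initialized" bits, tested and set
// through masks (mkSetFlag above emits bmpRef = bmpRef | mask).
class BitmapSketch {
  private var bitmap = 0
  def isSet(n: Int): Boolean = (bitmap & (1 << n)) != 0
  def setFlag(n: Int): Unit  = bitmap |= (1 << n)
}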
@@ -247,7 +242,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
bmps(n)
else {
val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(IntClass.tpe)
- atPhase(currentRun.typerPhase) {
+ beforeTyper {
sym addAnnotation VolatileAttr
}
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index b3b7596f9a..c9794cc20f 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -71,7 +71,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* maps all other types to themselves.
*/
private def toInterface(tp: Type): Type =
- atPhase(currentRun.mixinPhase)(tp.typeSymbol.toInterface).tpe
+ beforeMixin(tp.typeSymbol.toInterface).tpe
private def isFieldWithBitmap(field: Symbol) = {
field.info // ensure that nested objects are transformed
@@ -103,7 +103,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private val toInterfaceMap = new TypeMap {
def apply(tp: Type): Type = mapOver( tp match {
case TypeRef(pre, sym, args) if (sym.isImplClass) =>
- typeRef(pre, atPhase(currentRun.mixinPhase)(sym.toInterface), args)
+ typeRef(pre, beforeMixin(sym.toInterface), args)
case _ => tp
})
}
@@ -123,7 +123,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param mixinClass The mixin class that produced the superaccessor
*/
private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol =
- atPhase(currentRun.picklerPhase.next) {
+ afterPickler {
var bcs = base.info.baseClasses.dropWhile(mixinClass !=).tail
var sym: Symbol = NoSymbol
debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe +
@@ -131,7 +131,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
while (!bcs.isEmpty && sym == NoSymbol) {
if (settings.debug.value) {
val other = bcs.head.info.nonPrivateDecl(member.name);
- log("rebindsuper " + bcs.head + " " + other + " " + other.tpe +
+ debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe +
" " + other.isDeferred)
}
sym = member.matchingSymbol(bcs.head, base.thisType).suchThat(sym => !sym.hasFlag(DEFERRED | BRIDGE))
@@ -147,7 +147,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
member.hasAccessorFlag && (!member.isDeferred || (member hasFlag lateDEFERRED))
/** Is member overridden (either directly or via a bridge) in base class sequence `bcs`? */
- def isOverriddenAccessor(member: Symbol, bcs: List[Symbol]): Boolean = atPhase(ownPhase) {
+ def isOverriddenAccessor(member: Symbol, bcs: List[Symbol]): Boolean = beforeOwnPhase {
def hasOverridingAccessor(clazz: Symbol) = {
clazz.info.nonPrivateDecl(member.name).alternatives.exists(
sym =>
@@ -155,8 +155,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
!sym.hasFlag(MIXEDIN) &&
matchesType(sym.tpe, member.tpe, true))
}
- bcs.head != member.owner &&
- (hasOverridingAccessor(bcs.head) || isOverriddenAccessor(member, bcs.tail))
+ ( bcs.head != member.owner
+ && (hasOverridingAccessor(bcs.head) || isOverriddenAccessor(member, bcs.tail))
+ )
}
/** Add given member to given class, and mark member as mixed-in.
@@ -202,7 +203,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)
if (needsExpandedSetterName(field))
setter.name = nme.expandedSetterName(setter.name, clazz)
-
+
setter
}
@@ -241,7 +242,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
def addMixedinMembers(clazz: Symbol, unit : CompilationUnit) {
def cloneBeforeErasure(iface: Symbol, clazz: Symbol, imember: Symbol): Symbol = {
- val newSym = atPhase(currentRun.erasurePhase) {
+ val newSym = beforeErasure {
val res = imember.cloneSymbol(clazz)
// since we used the member (imember) from the interface that represents the trait that's being mixed in,
// have to instantiate the interface type params (that may occur in imember's info) as they are seen from the class
@@ -337,8 +338,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case _ => // otherwise mixin a field as well
// atPhase: the private field is moved to the implementation class by erasure,
// so it can no longer be found in the member's owner (the trait)
- val accessed = atPhase(currentRun.picklerPhase)(member.accessed)
- val sym = atPhase(currentRun.erasurePhase){ // #3857, need to retain info before erasure when cloning (since cloning only carries over the current entry in the type history)
+ val accessed = beforePickler(member.accessed)
+ val sym = beforeErasure { // #3857, need to retain info before erasure when cloning (since cloning only carries over the current entry in the type history)
clazz.newValue(nme.getterToLocal(member.name), member.pos).setInfo(member.tpe.resultType) // so we have a type history entry before erasure
}
sym.updateInfo(member.tpe.resultType) // info at current phase
@@ -349,13 +350,15 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
setAnnotations accessed.annotations)
}
}
- } else if (member.isSuperAccessor) { // mixin super accessors
+ }
+ else if (member.isSuperAccessor) { // mixin super accessors
val member1 = addMember(clazz, member.cloneSymbol(clazz)) setPos clazz.pos
assert(member1.alias != NoSymbol, member1)
val alias1 = rebindSuper(clazz, member.alias, mixinClass)
member1.asInstanceOf[TermSymbol] setAlias alias1
- } else if (member.isMethod && member.isModule && member.hasNoFlags(LIFTED | BRIDGE)) {
+ }
+ else if (member.isMethod && member.isModule && member.hasNoFlags(LIFTED | BRIDGE)) {
// mixin objects: todo what happens with abstract objects?
addMember(clazz, member.cloneSymbol(clazz, member.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos)
}
@@ -383,7 +386,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
var parents1 = parents
var decls1 = decls
if (!clazz.isPackageClass) {
- atPhase(phase.next)(clazz.owner.info)
+ afterMixin(clazz.owner.info)
if (clazz.isImplClass) {
clazz setFlag lateMODULE
var sourceModule = clazz.owner.info.decls.lookup(sym.name.toTermName)
@@ -449,7 +452,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
&& sym.owner == templ.symbol.owner
&& !sym.isLazy
&& !tree.isDef) {
- log("added use in: " + currentOwner + " -- " + tree)
+ debuglog("added use in: " + currentOwner + " -- " + tree)
usedIn(sym) ::= currentOwner
}
@@ -459,7 +462,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
}
SingleUseTraverser(templ)
- log("usedIn: " + usedIn)
+ debuglog("usedIn: " + usedIn)
usedIn filter {
case (_, member :: Nil) => member.isValue && member.isLazy
case _ => false
@@ -515,7 +518,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree match {
case Template(parents, self, body) =>
localTyper = erasure.newTyper(rootContext.make(tree, currentOwner))
- atPhase(phase.next)(currentOwner.owner.info)//todo: needed?
+ afterMixin(currentOwner.owner.info)//todo: needed?
if (!currentOwner.isTrait && !isValueClass(currentOwner))
addMixedinMembers(currentOwner, unit)
@@ -523,18 +526,18 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
addLateInterfaceMembers(currentOwner)
tree
- case DefDef(mods, name, tparams, List(vparams), tpt, rhs) =>
+ case DefDef(_, _, _, vparams :: Nil, _, _) =>
if (currentOwner.isImplClass) {
if (isImplementedStatically(sym)) {
sym setFlag notOVERRIDE
self = sym.newValueParameter(nme.SELF, sym.pos) setInfo toInterface(currentOwner.typeOfThis)
val selfdef = ValDef(self) setType NoType
- treeCopy.DefDef(tree, mods, name, tparams, List(selfdef :: vparams), tpt, rhs)
- } else {
- EmptyTree
+ copyDefDef(tree)(vparamss = List(selfdef :: vparams))
}
- } else {
- if (currentOwner.isTrait && sym.isSetter && !atPhase(currentRun.picklerPhase)(sym.isDeferred)) {
+ else EmptyTree
+ }
+ else {
+ if (currentOwner.isTrait && sym.isSetter && !beforePickler(sym.isDeferred)) {
sym.addAnnotation(TraitSetterAnnotationClass)
}
tree
@@ -699,15 +702,11 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* This rhs is typed and then mixin transformed.
*/
def completeSuperAccessor(stat: Tree) = stat match {
- case DefDef(mods, name, tparams, List(vparams), tpt, EmptyTree) if stat.symbol.isSuperAccessor =>
+ case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor =>
val rhs0 = (Super(clazz, tpnme.EMPTY) DOT stat.symbol.alias)(vparams map (v => Ident(v.symbol)): _*)
val rhs1 = localTyped(stat.pos, rhs0, stat.symbol.tpe.resultType)
- val rhs2 = atPhase(currentRun.mixinPhase)(transform(rhs1))
- debuglog("complete super acc " + stat.symbol.fullLocationString +
- " " + rhs1 + " " + stat.symbol.alias.fullLocationString +
- "/" + stat.symbol.alias.owner.hasFlag(lateINTERFACE))//debug
- treeCopy.DefDef(stat, mods, name, tparams, List(vparams), tpt, rhs2)
+ deriveDefDef(stat)(_ => beforeMixin(transform(rhs1)))
case _ =>
stat
}
@@ -738,7 +737,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def createBitmap: Symbol = {
val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo IntClass.tpe
- atPhase(currentRun.typerPhase)(sym addAnnotation VolatileAttr)
+ beforeTyper(sym addAnnotation VolatileAttr)
category match {
case nme.BITMAP_TRANSIENT | nme.BITMAP_CHECKINIT_TRANSIENT => sym addAnnotation TransientAttr
@@ -846,7 +845,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val nulls = lazyValNullables(lzyVal).toList sortBy (_.id) map nullify
def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal), UNIT)
- log("nulling fields inside " + lzyVal + ": " + nulls)
+ if (nulls.nonEmpty)
+ log("nulling fields inside " + lzyVal + ": " + nulls)
+
val result = gen.mkDoubleCheckedLocking(clazz, cond, syncBody, nulls)
typedPos(init.head.pos)(BLOCK(result, retVal))
}
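// Hedged sketch of the shape gen.mkDoubleCheckedLocking produces for a
// lazy val (plain-Scala rendering, not the actual compiler trees):
class LazyCellSketch(compute: () => Int) {
  @volatile private var bitmap = 0
  private var value0 = 0
  def value: Int = {
    if ((bitmap & 1) == 0) synchronized {
      if ((bitmap & 1) == 0) {
        value0 = compute() // the init statements
        bitmap |= 1        // mkSetFlag(clazz, offset, lzyVal)
        // `nulls` would clear fields used only by this initializer
      }
    }
    value0
  }
}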
@@ -883,14 +884,13 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
def addCheckedGetters(clazz: Symbol, stats: List[Tree]): List[Tree] = {
def dd(stat: DefDef) = {
- val DefDef(mods, name, tp, vp, tpt, rhs) = stat
- val sym = stat.symbol
- def isUnit = sym.tpe.resultType.typeSymbol == UnitClass
- def isEmpty = rhs == EmptyTree
+ val sym = stat.symbol
+ def isUnit = sym.tpe.resultType.typeSymbol == UnitClass
+ def isEmpty = stat.rhs == EmptyTree
if (sym.isLazy && !isEmpty && !clazz.isImplClass) {
assert(fieldOffset contains sym, sym)
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt,
+ deriveDefDef(stat)(rhs =>
if (isUnit)
mkLazyDef(clazz, sym, List(rhs), UNIT, fieldOffset(sym))
else {
@@ -901,7 +901,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
else if (needsInitFlag(sym) && !isEmpty && !clazz.hasFlag(IMPLCLASS | TRAIT)) {
assert(fieldOffset contains sym, sym)
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt,
+ deriveDefDef(stat)(rhs =>
(mkCheckedAccessor(clazz, _: Tree, fieldOffset(sym), stat.pos, sym))(
if (sym.tpe.resultType.typeSymbol == UnitClass) UNIT
else rhs
@@ -909,26 +909,24 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
)
}
else if (sym.isConstructor) {
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt, addInitBits(clazz, rhs))
+ deriveDefDef(stat)(addInitBits(clazz, _))
}
else if (settings.checkInit.value && !clazz.isTrait && sym.isSetter) {
val getter = sym.getter(clazz)
if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt,
- Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter))), UNIT)
- )
+ deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter))), UNIT))
else stat
}
else if (sym.isModule && (!clazz.isTrait || clazz.isImplClass) && !sym.isBridge) {
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt,
- typedPos(stat.pos) {
+ deriveDefDef(stat)(rhs =>
+ typedPos(stat.pos)(
mkInnerClassAccessorDoubleChecked(
// Martin to Hubert: I think this can be replaced by selfRef(tree.pos)
// @PP: It does not seem so, it crashes for me trying to bootstrap.
- if (clazz.isImplClass) gen.mkAttributedIdent(vp.head.head.symbol) else gen.mkAttributedThis(clazz),
+ if (clazz.isImplClass) gen.mkAttributedIdent(stat.vparamss.head.head.symbol) else gen.mkAttributedThis(clazz),
rhs
)
- }
+ )
)
}
else stat
@@ -943,7 +941,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private def checkedGetter(lhs: Tree) = {
val sym = clazz.info decl lhs.symbol.getterName suchThat (_.isGetter)
if (needsInitAndHasOffset(sym)) {
- log("adding checked getter for: " + sym + " " + lhs.symbol.defaultFlagString)
+ debuglog("adding checked getter for: " + sym + " " + lhs.symbol.defaultFlagString)
List(localTyper typed mkSetFlag(clazz, fieldOffset(sym), sym))
}
else Nil
@@ -1130,7 +1128,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
private def postTransform(tree: Tree): Tree = {
val sym = tree.symbol
- // assert(tree.tpe ne null, tree.getClass +" : "+ tree +" in "+ localTyper.context.tree)
// change every node type that refers to an implementation class to its
// corresponding interface, unless the node's symbol is an implementation class.
if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass))
@@ -1163,7 +1160,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def implSym = implClass(sym.owner).info.member(sym.name)
assert(target ne NoSymbol,
List(sym + ":", sym.tpe, sym.owner, implClass(sym.owner), implSym,
- atPhase(phase.prev)(implSym.tpe), phase) mkString " "
+ beforePrevPhase(implSym.tpe), phase) mkString " "
)
typedPos(tree.pos)(Apply(staticRef(target), transformSuper(qual) :: args))
}
@@ -1188,7 +1185,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val sym1 = sym.overridingSymbol(currentOwner.enclClass)
typedPos(tree.pos)((transformSuper(qual) DOT sym1)())
} else {
- staticCall(atPhase(phase.prev)(sym.overridingSymbol(implClass(sym.owner))))
+ staticCall(beforePrevPhase(sym.overridingSymbol(implClass(sym.owner))))
}
} else {
assert(!currentOwner.enclClass.isImplClass, currentOwner.enclClass)
@@ -1237,7 +1234,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val tree1 = super.transform(preTransform(tree))
// localTyper needed when not flattening inner classes. parts after an
// inner class will otherwise be typechecked with a wrong scope
- try atPhase(phase.next)(postTransform(tree1))
+ try afterMixin(postTransform(tree1))
finally localTyper = saved
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 1200e973c5..d8c18c2d50 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -45,8 +45,14 @@ abstract class OverridingPairs {
* Types always match. Term symbols match if their membertypes
* relative to <base>.this do
*/
- protected def matches(sym1: Symbol, sym2: Symbol): Boolean =
- sym1.isType || (self.memberType(sym1) matches self.memberType(sym2))
+ protected def matches(sym1: Symbol, sym2: Symbol): Boolean = {
+ def tp_s(s: Symbol) = self.memberType(s) + "/" + self.memberType(s).getClass
+ val result = sym1.isType || (self.memberType(sym1) matches self.memberType(sym2))
+ debuglog("overriding-pairs? %s matches %s (%s vs. %s) == %s".format(
+ sym1.fullLocationString, sym2.fullLocationString, tp_s(sym1), tp_s(sym2), result))
+
+ result
+ }
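// Illustration of the matching rule documented above: in
//   class Base { def f(x: Int): Int = x }
//   trait More extends Base { override def f(x: Int): Int = x + 1 }
//   class C extends Base with More
// the pair (Base.f, More.f) matches because both member types seen from
// C.this are (x: Int)Int; type members always match by name alone.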
/** An implementation of BitSets as arrays (maybe consider collection.BitSet
* for that?) The main purpose of this is to implement
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index c1265b39d7..323fecfd0a 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -6,12 +6,9 @@
package scala.tools.nsc
package transform
-
import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
-
-
/** Specialize code on types.
*
* Make sure you've read the thesis:
@@ -71,10 +68,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
RootClass, BooleanClass, UnitClass, ArrayClass,
ScalaValueClasses, isValueClass, isScalaValueType,
SpecializedClass, RepeatedParamClass, JavaRepeatedParamClass,
- AnyRefClass, ObjectClass, Predef_AnyRef,
- uncheckedVarianceClass
+ AnyRefClass, ObjectClass, AnyRefModule,
+ GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass
}
-
+
/** TODO - this is a lot of maps.
*/
@@ -82,17 +79,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap
/** Map a method symbol to a list of its specialized overloads in the same class. */
- private val overloads: mutable.Map[Symbol, List[Overload]] = mutable.HashMap[Symbol, List[Overload]]() withDefaultValue Nil
+ private val overloads = perRunCaches.newMap[Symbol, List[Overload]]() withDefaultValue Nil
/** Map a symbol to additional information on specialization. */
- private val info: mutable.Map[Symbol, SpecializedInfo] = perRunCaches.newMap[Symbol, SpecializedInfo]()
+ private val info = perRunCaches.newMap[Symbol, SpecializedInfo]()
/** Map class symbols to the type environments where they were created. */
- private val typeEnv = mutable.HashMap[Symbol, TypeEnv]() withDefaultValue emptyEnv
+ private val typeEnv = perRunCaches.newMap[Symbol, TypeEnv]() withDefaultValue emptyEnv
- // holds mappings from regular type parameter symbols to symbols of
- // specialized type parameters which are subtypes of AnyRef
- private val anyrefSpecCache = perRunCaches.newMap[Symbol, Symbol]()
+ // Key: a specialized class or method
+ // Value: a map from tparams in the original class to tparams in the specialized class.
+ private val anyrefSpecCache = perRunCaches.newMap[Symbol, mutable.Map[Symbol, Symbol]]()
// holds mappings from members to the type variables in the class
// that they were already specialized for, so that they don't get
@@ -100,30 +97,53 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private val wasSpecializedForTypeVars = perRunCaches.newMap[Symbol, Set[Symbol]]() withDefaultValue Set()
/** Concrete methods that use a specialized type, or override such methods. */
- private val concreteSpecMethods = new mutable.HashSet[Symbol]()
+ private val concreteSpecMethods = perRunCaches.newWeakSet[Symbol]()
private def isSpecialized(sym: Symbol) = sym hasAnnotation SpecializedClass
private def hasSpecializedFlag(sym: Symbol) = sym hasFlag SPECIALIZED
private def specializedTypes(tps: List[Symbol]) = tps filter isSpecialized
- private def specializedOn(sym: Symbol) = sym getAnnotation SpecializedClass match {
- case Some(AnnotationInfo(_, args, _)) => args
- case _ => Nil
+ private def specializedOn(sym: Symbol): List[Symbol] = {
+ sym getAnnotation SpecializedClass match {
+ case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol)
+ case Some(ann @ AnnotationInfo(_, args, _)) => {
+ args map (_.tpe) flatMap { tp =>
+ tp baseType GroupOfSpecializable match {
+ case TypeRef(_, GroupOfSpecializable, arg :: Nil) =>
+ arg.typeArgs map (_.typeSymbol)
+ case _ =>
+ List(tp.typeSymbol)
+ }
+ }
+ }
+ case _ => Nil
+ }
}
// If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
// then pos/spec-List.scala fails - why? Does this kind of check fail
// for similar reasons? Does `sym.isAbstractType` make a difference?
- private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = (
- specializedOn(sym).exists(_.symbol == Predef_AnyRef) // specialized on AnyRef
- && !isValueClass(tp.typeSymbol)
- && isBoundedGeneric(tp)
- )
+ private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = {
+ specializedOn(sym).exists(s => !isValueClass(s)) &&
+ !isValueClass(tp.typeSymbol) &&
+ isBoundedGeneric(tp)
+ //(tp <:< AnyRefClass.tpe)
+ }
private def isBoundedGeneric(tp: Type) = tp match {
case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefClass.tpe)
case TypeRef(_, sym, _) => !isValueClass(sym)
case _ => false
}
+ def unspecializedSymbol(sym: Symbol): Symbol = {
+ if (sym hasFlag SPECIALIZED) {
+ // look up the generic counterpart this symbol was specialized from
+ val genericName = nme.unspecializedName(sym.name)
+ val member = sym.owner.info.decl(genericName.toTypeName)
+ member
+ }
+ else NoSymbol
+ }
+
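// Hedged usage illustration of what specializedOn resolves, including the
// new GroupOfSpecializable handling (Specializable.Primitives is assumed
// here as a representative group name):
//   class Box [@specialized T]                            // no args: all specializable types
//   class Pair[@specialized(Int, Double) A]               // an explicit list
//   class Grp [@specialized(Specializable.Primitives) B]  // a group expands to its members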
object TypeEnv {
/** Return a new type environment binding specialized type parameters of sym to
* the given args. Expects the lists to have the same length.
@@ -241,7 +261,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val stvTypeParams = specializedTypeVars(target.info.typeParams map (_.info))
val stvResult = specializedTypeVars(target.info.resultType)
- log("degenerate: " + target + " stv tparams: " + stvTypeParams + " stv info: " + stvResult)
+ debuglog("degenerate: " + target + " stv tparams: " + stvTypeParams + " stv info: " + stvResult)
(stvTypeParams -- stvResult).nonEmpty
}
@@ -272,10 +292,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val pre1 = this(pre)
// when searching for a specialized class, take care to map all
// type parameters that are subtypes of AnyRef to AnyRef
- val args1 = map2(args, sym.typeParams) {
- case (tp, orig) if isSpecializedAnyRefSubtype(tp, orig) => AnyRefClass.tpe
- case (tp, _) => tp
- }
+ val args1 = map2(args, sym.info.typeParams)((tp, orig) =>
+ if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefClass.tpe
+ else tp
+ )
specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
case Some(sym1) => typeRef(pre1, sym1, survivingArgs(sym, args))
case None => typeRef(pre1, sym, args)
@@ -315,35 +335,38 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
nme.getterToLocal(specializedName(nme.localToGetter(name), types1, types2))
else {
val (base, cs, ms) = nme.splitSpecializedName(name)
- val abbrevs = definitions.abbrvTag withDefaultValue definitions.abbrvTag(ObjectClass)
newTermName(base.toString + "$"
- + "m" + ms + types1.map(t => abbrevs(t.typeSymbol)).mkString("", "", "")
- + "c" + cs + types2.map(t => abbrevs(t.typeSymbol)).mkString("", "", "$sp"))
+ + "m" + ms + types1.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "")
+ + "c" + cs + types2.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
}
}
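// Example of the mangled shape assembled above: for base "apply", no
// specialized method tparams, and class tparams specialized at (Int, Int):
//   "apply" + "$" + "m" + "" + "c" + "II" + "$sp"  ==>  apply$mcII$sp
// the familiar name of Function1's Int-to-Int specialized apply.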
- lazy val primitiveTypes = ScalaValueClasses map (_.tpe)
+ lazy val specializableTypes = (ScalaValueClasses :+ AnyRefClass) map (_.tpe) sorted
+
+ /** If the symbol is the companion of a value class, the value class.
+ * Otherwise, AnyRef.
+ */
+ def specializesClass(sym: Symbol): Symbol = {
+ val c = sym.companionClass
+ if (isValueClass(c)) c else AnyRefClass
+ }
/** Return the types `sym` should be specialized at. This may be some of the primitive types
* or AnyRef. AnyRef means that a new type parameter T will be generated later, known to be a
* subtype of AnyRef (T <: AnyRef).
* These are in a meaningful order for stability purposes.
*/
- def concreteTypes(sym: Symbol): List[Type] = (
- if (!isSpecialized(sym)) Nil // no @specialized Annotation
- else specializedOn(sym) match {
- case Nil => primitiveTypes // specialized on everything
- case args => // specialized on args
- (args map { tp =>
- if (tp.symbol == Predef_AnyRef) {
- if (isBoundedGeneric(sym.tpe))
- reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
- AnyRefClass.tpe
- }
- else tp.symbol.companionClass.tpe
- }).sorted
- }
- )
+ def concreteTypes(sym: Symbol): List[Type] = {
+ val types = if (!isSpecialized(sym))
+ Nil // no @specialized Annotation
+ else
+ specializedOn(sym) map (s => specializesClass(s).tpe) sorted
+
+ if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass))
+ reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
+
+ types
+ }
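// Hedged illustration of specializesClass/concreteTypes above: annotation
// arguments name companion modules, each mapped to its value class, or to
// AnyRef when the companion is not a value class:
//   @specialized(Int, Double)  ==> List(Int.tpe, Double.tpe)   (sorted)
//   @specialized(AnyRef)       ==> List(AnyRefClass.tpe), plus the
//     "always a subtype of AnyRef" warning if the tparam is already bounded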
/** Return a list of all type environments for all specializations
* of @specialized types in `tps`.
@@ -357,8 +380,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case set :: Nil => set map (x => List(x))
case set :: sets => for (x <- set ; xs <- loop(sets)) yield x :: xs
}
- // zip the keys with each permutation to create a TypeEnv
- loop(keys map concreteTypes) map (xss => Map(keys zip xss: _*))
+ // zip the keys with each permutation to create a TypeEnv.
+ // If we don't exclude the "all AnyRef" specialization, we will
+ // incur duplicate members and crash during mixin.
+ loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefClass.tpe)) map (xss => Map(keys zip xss: _*))
}
/** Does the given 'sym' need to be specialized in the environment 'env'?
@@ -382,8 +407,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
tpes foreach (tp => buf ++= specializedTypeVars(tp))
buf.result
}
- def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] =
- atPhase(currentRun.typerPhase)(specializedTypeVars(sym.info))
+ def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = beforeTyper(specializedTypeVars(sym.info))
/** Return the set of @specialized type variables mentioned by the given type.
* It only counts type variables that appear:
@@ -415,27 +439,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case _ => Set()
}
- /** Returns the type parameter in the specialized class `clazz` that corresponds to type parameter
- * `sym` in the original class. It will create it if needed or use the one from the cache.
+ /** Returns the type parameter in the specialized class `sClass` that corresponds to type parameter
+ * `tparam` in the original class. It will create it if needed or use the one from the cache.
*/
- private def typeParamSubAnyRef(sym: Symbol, clazz: Symbol) = (
- anyrefSpecCache.getOrElseUpdate(sym,
- clazz.newTypeParameter(sym.name append nme.SPECIALIZED_SUFFIX_NAME toTypeName, sym.pos)
- setInfo TypeBounds(sym.info.bounds.lo, AnyRefClass.tpe)
+ private def typeParamSubAnyRef(tparam: Symbol, sClass: Symbol): Type = {
+ val sClassMap = anyrefSpecCache.getOrElseUpdate(sClass, mutable.Map[Symbol, Symbol]())
+
+ sClassMap.getOrElseUpdate(tparam,
+ tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX)
+ modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
).tpe
- )
+ }
/** Cleans the anyrefSpecCache of all type parameter symbols of a class.
*/
- private def cleanAnyRefSpecCache(clazz: Symbol, decls: List[Symbol]) = (
+ private def cleanAnyRefSpecCache(clazz: Symbol, decls: List[Symbol]) {
// remove class type parameters and those of normalized members.
- clazz :: decls foreach {
- _.tpe match {
- case PolyType(tparams, _) => anyrefSpecCache --= tparams
- case _ => ()
- }
- }
- )
+ clazz :: decls foreach (anyrefSpecCache remove _)
+ }
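// Sketch of the two-level cache pattern now used by typeParamSubAnyRef:
// outer key is the specialized class, inner key the original tparam, so a
// whole outer entry can be dropped by cleanAnyRefSpecCache. Simplified
// String stand-ins for Symbols:
import scala.collection.mutable
val anyrefCache = mutable.Map[String, mutable.Map[String, String]]()
def specTParam(sClass: String, tparam: String): String =
  anyrefCache
    .getOrElseUpdate(sClass, mutable.Map())
    .getOrElseUpdate(tparam, tparam + "$sp") // stands in for cloneSymbol + new bounds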
/** Type parameters that survive when specializing in the specified environment. */
def survivingParams(params: List[Symbol], env: TypeEnv) =
@@ -490,11 +511,21 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* was both already used for a map and mucho long. So "sClass" is the
* specialized subclass of "clazz" throughout this file.
*/
- val sClass = clazz.owner.newClass(specializedName(clazz, env0).toTypeName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE)
+
+ // SI-5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is
+ // to force .info on them, as their lazy type will be evaluated and the symbols will be eliminated. Unfortunately
+ // evaluating the info after creating the specialized class will mess up the specialized class's signature, so we'd
+ // better evaluate it before creating the new class symbol
+ val clazzName = specializedName(clazz, env0).toTypeName
+ val bytecodeClazz = clazz.owner.info.decl(clazzName)
+ debuglog("Specializing " + clazz + " found " + bytecodeClazz + " already there")
+ bytecodeClazz.info
+
+ val sClass = clazz.owner.newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE)
def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long) =
member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED))
-
+
sClass.sourceFile = clazz.sourceFile
currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin
@@ -527,7 +558,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
var res: List[Type] = Nil
// log(specializedClass + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol))
for (p <- parents) {
- val stp = atPhase(phase.next)(specializedType(p))
+ val stp = afterSpecialize(specializedType(p))
if (stp != p)
if (p.typeSymbol.isTrait) res ::= stp
else if (currentRun.compiles(clazz))
@@ -537,7 +568,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
res
}
- var parents = List(applyContext(atPhase(currentRun.typerPhase)(clazz.tpe)))
+ var parents = List(applyContext(beforeTyper(clazz.tpe)))
// log("!!! Parents: " + parents + ", sym: " + parents.map(_.typeSymbol))
if (parents.head.typeSymbol.isTrait)
parents = parents.head.parents.head :: parents
@@ -551,7 +582,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// as with the parents and assign it to typeOfThis.
if (clazz.typeOfThis.typeConstructor ne clazz.typeConstructor) {
sClass.typeOfThis = applyContext(clazz.typeOfThis)
- log("Rewriting self-type for specialized class:\n" +
+ debuglog("Rewriting self-type for specialized class:\n" +
" " + clazz.defStringSeenAs(clazz.typeOfThis) + "\n" +
" => " + sClass.defStringSeenAs(sClass.typeOfThis)
)
@@ -559,7 +590,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
GenPolyType(newClassTParams, ClassInfoType(parents ::: extraSpecializedMixins, decls1, sClass))
}
- atPhase(phase.next)(sClass setInfo specializedInfoType)
+ afterSpecialize(sClass setInfo specializedInfoType)
val fullEnv = outerEnv ++ env
/** Enter 'sym' in the scope of the current specialized class. Its type is
@@ -633,7 +664,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
})
}
else
- log("conflicting env for " + m + " env: " + env)
+ debuglog("conflicting env for " + m + " env: " + env)
}
else if (m.isDeferred) { // abstract methods
val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED))
@@ -702,7 +733,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
typeEnv(specClass) = fullEnv
specClass.name = specializedName(specClass, fullEnv).toTypeName
enterMember(specClass)
- log("entered specialized class " + specClass.fullName)
+ debuglog("entered specialized class " + specClass.fullName)
info(specClass) = SpecializedInnerClass(m, fullEnv)
}
}
@@ -739,7 +770,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (existing != NoSymbol)
clazz.owner.info.decls.unlink(existing)
- atPhase(phase.next)(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
+ afterSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
}
if (subclasses.nonEmpty) clazz.resetFlag(FINAL)
cleanAnyRefSpecCache(clazz, decls1)
@@ -758,7 +789,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
debuglog("normalizeMember: " + sym.fullName)
sym :: (
- if (!sym.isMethod || atPhase(currentRun.typerPhase)(sym.typeParams.isEmpty)) Nil
+ if (!sym.isMethod || beforeTyper(sym.typeParams.isEmpty)) Nil
else {
var specializingOn = specializedParams(sym)
val unusedStvars = specializingOn filterNot specializedTypeVars(sym.info)
@@ -816,10 +847,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (sym.isPrivate/* || sym.isProtected*/) {
//sym.privateWithin = sym.enclosingPackage
sym.resetFlag(PRIVATE).setFlag(PROTECTED)
- log("-->d SETTING PRIVATE WITHIN TO " + sym.enclosingPackage + " for " + sym)
+ debuglog("-->d SETTING PRIVATE WITHIN TO " + sym.enclosingPackage + " for " + sym)
}
- sym.resetFlag(FINAL)
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
@@ -900,7 +930,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
checkOverriddenTParams(overridden)
val env = unify(overridden.info, overriding.info, emptyEnv, false)
- def atNext = atPhase(phase.next)(overridden.owner.info.decl(specializedName(overridden, env)))
+ def atNext = afterSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
debuglog("\t\tenv: " + env + "isValid: " + TypeEnv.isValid(env, overridden) + "found: " + atNext)
if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol)
@@ -915,7 +945,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case (NoSymbol, _) => None
case (overridden, env) =>
val om = specializedOverload(clazz, overridden, env)
- log("Added specialized overload %s for %s in env: %s with type: %s".format(om, overriding.fullName, env, om.info))
+ debuglog("Added specialized overload %s for %s in env: %s with type: %s".format(om, overriding.fullName, env, om.info))
typeEnv(om) = env
addConcreteSpecMethod(overriding)
info(om) = (
@@ -940,7 +970,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
)
overloads(overriding) ::= Overload(om, env)
- ifDebug(atPhase(phase.next)(assert(
+ ifDebug(afterSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
"Could not find " + om.name + " in " + overridden.owner.info.decls))
)
@@ -950,6 +980,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
case object UnifyError extends scala.util.control.ControlThrowable
+ private[this] def unifyError(tp1: Any, tp2: Any): Nothing = {
+ log("unifyError" + ((tp1, tp2)))
+ throw UnifyError
+ }
/** Return the most general type environment that specializes tp1 to tp2.
* It only allows binding of type parameters annotated with @specialized.
@@ -960,29 +994,34 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean): TypeEnv = (tp1, tp2) match {
case (TypeRef(_, sym1, _), _) if isSpecialized(sym1) =>
debuglog("Unify - basic case: " + tp1 + ", " + tp2)
- if (isValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
+ if (isValueClass(tp2.typeSymbol))
env + ((sym1, tp2))
+ else if (isSpecializedAnyRefSubtype(tp2, sym1))
+ env + ((sym1, tp2)) // env + ((sym1, AnyRefClass.tpe))
+ else if (strict)
+ unifyError(tp1, tp2)
else
- if (strict) throw UnifyError else env
+ env
case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) =>
debuglog("Unify TypeRefs: " + tp1 + " and " + tp2 + " with args " + (args1, args2) + " - ")
- if (strict && args1.length != args2.length) throw UnifyError
+ if (strict && args1.length != args2.length) unifyError(tp1, tp2)
val e = unify(args1, args2, env, strict)
debuglog("unified to: " + e)
e
case (TypeRef(_, sym1, _), _) if sym1.isTypeParameterOrSkolem =>
env
case (MethodType(params1, res1), MethodType(params2, res2)) =>
- if (strict && params1.length != params2.length) throw UnifyError
+ if (strict && params1.length != params2.length) unifyError(tp1, tp2)
debuglog("Unify MethodTypes: " + tp1 + " and " + tp2)
unify(res1 :: (params1 map (_.tpe)), res2 :: (params2 map (_.tpe)), env, strict)
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- if (strict && tparams1.length != tparams2.length) throw UnifyError
debuglog("Unify PolyTypes: " + tp1 + " and " + tp2)
- unify(res1, res2, env, strict)
- case (PolyType(_, res), other) =>
- unify(res, other, env, strict)
- case (ThisType(_), ThisType(_)) => env
+ if (strict && tparams1.length != tparams2.length)
+ unifyError(tp1, tp2)
+ else
+ unify(res1, res2, env, strict)
+ case (PolyType(_, res), other) => unify(res, other, env, strict)
+ case (ThisType(_), ThisType(_)) => env
case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict)
case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict)
case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict)
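// Worked examples of unify above (schematic; T is a @specialized tparam):
//   unify(T, Int, env, _)                          ==> env + (T -> Int)
//   unify(List[T], List[Int], env, _)              ==> env + (T -> Int)   (TypeRef case)
//   unify((T, T) => T, (Int, Int) => Int, env, _)  ==> env + (T -> Int)   (MethodType case)
//   unify(T => T, (Int, Int) => Int, env, strict = true)  ==> unifyError (arity mismatch)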
@@ -1004,7 +1043,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (env.keySet intersect nenv.keySet isEmpty) env ++ nenv
else {
debuglog("could not unify: u(" + args._1 + ", " + args._2 + ") yields " + nenv + ", env: " + env)
- throw UnifyError
+ unifyError(tp1, tp2)
}
}
}
@@ -1082,7 +1121,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case cinfo @ ClassInfoType(parents, decls, clazz) if !unspecializableClass(cinfo) =>
val tparams = tpe.typeParams
if (tparams.isEmpty)
- atPhase(phase.next)(parents map (_.typeSymbol.info))
+ afterSpecialize(parents map (_.typeSymbol.info))
val parents1 = parents map specializedType
debuglog("transformInfo %s %s with parents1 %s ph: %s".format(
@@ -1128,7 +1167,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (warnings)
reporter.warning(tvar.pos, "Bounds prevent specialization of " + tvar)
- log("specvars: " +
+ debuglog("specvars: " +
tvar.info.bounds.lo + ": " +
specializedTypeVars(tvar.info.bounds.lo) + " " +
subst(env, tvar.info.bounds.hi) + ": " +
@@ -1197,27 +1236,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else NoSymbol
def illegalSpecializedInheritance(clazz: Symbol): Boolean = (
- hasSpecializedFlag(clazz)
+ hasSpecializedFlag(clazz)
&& originalClass(clazz).parentSymbols.exists(p => hasSpecializedParams(p) && !p.isTrait)
)
def specializeCalls(unit: CompilationUnit) = new TypingTransformer(unit) {
/** Map a specializable method to its rhs, when not deferred. */
- val body: mutable.Map[Symbol, Tree] = new mutable.HashMap
+ val body = perRunCaches.newMap[Symbol, Tree]()
/** Map a specializable method to its value parameter symbols. */
- val parameters: mutable.Map[Symbol, List[List[Symbol]]] = new mutable.HashMap
+ val parameters = perRunCaches.newMap[Symbol, List[Symbol]]()
/** Collect method bodies that are concrete specialized methods.
*/
class CollectMethodBodies extends Traverser {
override def traverse(tree: Tree) = tree match {
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case DefDef(_, _, _, vparams :: Nil, _, rhs) =>
if (concreteSpecMethods(tree.symbol) || tree.symbol.isConstructor) {
debuglog("!!! adding body of a defdef %s, symbol %s: %s".format(tree, tree.symbol, rhs))
body(tree.symbol) = rhs
// body(tree.symbol) = tree // whole method
- parameters(tree.symbol) = mmap(vparamss)(_.symbol)
+ parameters(tree.symbol) = vparams.map(_.symbol)
concreteSpecMethods -= tree.symbol
} // no need to descend further down inside method bodies
@@ -1230,7 +1269,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- def doesConform(origSymbol: Symbol, treeType: Type, memberType: Type, env: TypeEnv) =
+ def doesConform(origSymbol: Symbol, treeType: Type, memberType: Type, env: TypeEnv) = {
(treeType =:= memberType) || { // anyref specialization
memberType match {
case PolyType(_, resTpe) =>
@@ -1247,6 +1286,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case _ => false
}
}
+ }
override def transform(tree: Tree): Tree = {
val symbol = tree.symbol
@@ -1254,20 +1294,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
def specSym(qual: Tree): Option[Symbol] = {
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- log("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
+ debuglog("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
.format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env)))
if (!env.isEmpty) { // a method?
val specCandidates = qual.tpe.member(specializedName(symbol, env))
val specMember = specCandidates suchThat { s =>
doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env)
}
-
- log("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
- log("[specSym] found specMember: " + specMember)
+
+ debuglog("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
+ debuglog("[specSym] found specMember: " + specMember)
if (specMember ne NoSymbol)
if (TypeEnv.includes(typeEnv(specMember), env)) Some(specMember)
else {
- log("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env))
+ debuglog("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env))
None
}
else None
@@ -1277,16 +1317,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
- if (findSpec(tpt.tpe).typeSymbol ne tpt.tpe.typeSymbol) {
+ debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
+ val found = findSpec(tpt.tpe)
+ if (found.typeSymbol ne tpt.tpe.typeSymbol) {
// the ctor can be specialized
- log("** instantiated specialized type: " + findSpec(tpt.tpe))
- try {
- atPos(tree.pos)(
- localTyper.typed(
- Apply(
- Select(New(TypeTree(findSpec(tpt.tpe))), nme.CONSTRUCTOR),
- transformTrees(args))))
- } catch {
+ debuglog("** instantiated specialized type: " + found)
+ try localTyper.typedPos(tree.pos)(New(found, transformTrees(args): _*))
+ catch {
case te: TypeError =>
reporter.error(tree.pos, te.msg)
super.transform(tree)
@@ -1313,7 +1350,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
)
val tree1 = gen.mkTypeApply(Select(qual1, specMember), residualTargs)
- log("rewrote " + tree + " to " + tree1)
+ debuglog("rewrote " + tree + " to " + tree1)
localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
case None => super.transform(tree)
@@ -1321,8 +1358,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case Select(Super(_, _), name) if illegalSpecializedInheritance(currentClass) =>
val pos = tree.pos
- log(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
- log(pos.lineContent)
+ debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
+ debuglog(pos.lineContent)
tree
case Select(qual, name) =>
@@ -1378,13 +1415,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
(new CollectMethodBodies)(tree)
val parents1 = map2(currentOwner.info.parents, parents)((tpe, parent) =>
TypeTree(tpe) setPos parent.pos)
-
+
treeCopy.Template(tree,
parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ ,
self,
atOwner(currentOwner)(transformTrees(body ::: specMembers)))
- case ddef @ DefDef(mods, name, tparams, vparamss, tpt, rhs) if info.isDefinedAt(symbol) =>
+ case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
if (symbol.isConstructor) {
@@ -1392,116 +1429,96 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val superRef: Tree = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
forwardCtorCall(tree.pos, superRef, vparamss, symbol.owner)
}
- if (symbol.isPrimaryConstructor) localTyper typed {
- atPos(symbol.pos)(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, Block(List(t), Literal(Constant()))))
- } else {
- // duplicate the original constructor
+ if (symbol.isPrimaryConstructor)
+ localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
+ else // duplicate the original constructor
duplicateBody(ddef, info(symbol).target)
- }
- } else info(symbol) match {
-
+ }
+ else info(symbol) match {
case Implementation(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
val tree1 = duplicateBody(ddef, target)
debuglog("implementation: " + tree1)
- val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
- treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
+ deriveDefDef(tree1)(transform)
case NormalizedMember(target) =>
- log("Normalized member: " + symbol + ", target: " + target)
+ debuglog("Normalized member: " + symbol + ", target: " + target)
if (target.isDeferred || conflicting(typeEnv(symbol))) {
- treeCopy.DefDef(
- tree, mods, name, tparams, vparamss, tpt,
- localTyper typed gen.mkSysErrorCall("boom! you stepped on a bug. This method should never be called.")
- )
+ deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
}
else {
// we have an rhs, specialize it
val tree1 = duplicateBody(ddef, target)
debuglog("implementation: " + tree1)
- val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
- treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
+ deriveDefDef(tree1)(transform)
}
case SpecialOverride(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
//debuglog("moving implementation, body of target " + target + ": " + body(target))
- log("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
+ debuglog("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
// we have an rhs, specialize it
val tree1 = addBody(ddef, target)
(new ChangeOwnerTraverser(target, tree1.symbol))(tree1.rhs)
debuglog("changed owners, now: " + tree1)
- val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
- treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
-
+ deriveDefDef(tree1)(transform)
case SpecialOverload(original, env) =>
debuglog("completing specialized " + symbol.fullName + " calling " + original)
- log("special overload " + original + " -> " + env)
+ debuglog("special overload " + original + " -> " + env)
val t = DefDef(symbol, { vparamss =>
val fun = Apply(Select(This(symbol.owner), original),
makeArguments(original, vparamss.head))
- log("inside defdef: " + symbol + "; type: " + symbol.tpe + "; owner: " + symbol.owner)
+ debuglog("inside defdef: " + symbol + "; type: " + symbol.tpe + "; owner: " + symbol.owner)
gen.maybeMkAsInstanceOf(fun,
symbol.owner.thisType.memberType(symbol).finalResultType,
symbol.owner.thisType.memberType(original).finalResultType)
})
- log("created special overload tree " + t)
+ debuglog("created special overload tree " + t)
debuglog("created " + t)
localTyper.typed(t)
case fwd @ Forward(_) =>
- log("forward: " + fwd + ", " + ddef)
+ debuglog("forward: " + fwd + ", " + ddef)
val rhs1 = forwardCall(tree.pos, gen.mkAttributedRef(symbol.owner.thisType, fwd.target), vparamss)
- log("-->d completed forwarder to specialized overload: " + fwd.target + ": " + rhs1)
- localTyper.typed(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs1))
+ debuglog("-->d completed forwarder to specialized overload: " + fwd.target + ": " + rhs1)
+ localTyper.typed(deriveDefDef(tree)(_ => rhs1))
case SpecializedAccessor(target) =>
val rhs1 = if (symbol.isGetter)
gen.mkAttributedRef(target)
else
Assign(gen.mkAttributedRef(target), Ident(vparamss.head.head.symbol))
- log("specialized accessor: " + target + " -> " + rhs1)
- localTyper.typed(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs1))
+ debuglog("specialized accessor: " + target + " -> " + rhs1)
+ localTyper.typed(deriveDefDef(tree)(_ => rhs1))
case Abstract(targ) =>
- log("abstract: " + targ)
- val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree
- val t = treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs)
- localTyper.typed(t)
+ debuglog("abstract: " + targ)
+ localTyper.typed(deriveDefDef(tree)(rhs => rhs))
}
- case ValDef(mods, name, tpt, rhs) if symbol.hasFlag(SPECIALIZED) && !symbol.isParamAccessor =>
+ case ValDef(_, _, _, _) if symbol.hasFlag(SPECIALIZED) && !symbol.isParamAccessor =>
assert(body.isDefinedAt(symbol.alias), body)
- val tree1 = treeCopy.ValDef(tree, mods, name, tpt, body(symbol.alias).duplicate)
+ val tree1 = deriveValDef(tree)(_ => body(symbol.alias).duplicate)
debuglog("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
+
val d = new Duplicator
- val ValDef(mods1, name1, tpt1, rhs1) = d.retyped(
+ val newValDef = d.retyped(
localTyper.context1.asInstanceOf[d.Context],
tree1,
symbol.alias.enclClass,
symbol.enclClass,
typeEnv(symbol.alias) ++ typeEnv(tree.symbol)
)
- val t = treeCopy.ValDef(tree1, mods1, name1, tpt1, transform(rhs1))
- log("valdef " + tree + " -> " + t)
- t
-
-// val tree1 =
-// treeCopy.ValDef(tree, mods, name, tpt,
-// localTyper.typed(
-// Apply(Select(Super(currentClass, nme.EMPTY), symbol.alias.getter(symbol.alias.owner)),
-// List())))
-// debuglog("replaced ValDef: " + tree1 + " in " + tree.symbol.owner.fullName)
-// tree1
+ deriveValDef(newValDef)(transform)
case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
- if (sup.symbol.info.parents != atPhase(phase.prev)(sup.symbol.info.parents)) =>
+ if (sup.symbol.info.parents != beforePrevPhase(sup.symbol.info.parents)) =>
def parents = sup.symbol.info.parents
- debuglog(tree + " parents changed from: " + atPhase(phase.prev)(parents) + " to: " + parents)
+ debuglog(tree + " parents changed from: " + beforePrevPhase(parents) + " to: " + parents)
val res = localTyper.typed(
Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
@@ -1513,18 +1530,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- private def reskolemize(tparams: List[TypeDef]): (List[Symbol], List[Symbol]) = {
- val saved = tparams map (_.symbol)
- localTyper skolemizeTypeParams tparams
- (saved, tparams map (_.symbol))
- }
-
private def duplicateBody(tree: DefDef, source: Symbol) = {
val symbol = tree.symbol
val meth = addBody(tree, source)
val d = new Duplicator
- log("-->d DUPLICATING: " + meth)
+ debuglog("-->d DUPLICATING: " + meth)
d.retyped(
localTyper.context1.asInstanceOf[d.Context],
meth,
@@ -1542,8 +1553,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def addBody(tree: DefDef, source: Symbol): DefDef = {
val symbol = tree.symbol
- debuglog("specializing body of" + symbol.fullName + ": " + symbol.info)
- val DefDef(mods, name, tparams, vparamss, tpt, _) = tree
+ debuglog("specializing body of" + symbol.defString)
+ val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree
// val (_, origtparams) = splitParams(source.typeParams)
val env = typeEnv(symbol)
val boundTvars = env.keySet
@@ -1551,12 +1562,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("substituting " + origtparams + " for " + symbol.typeParams)
// skolemize type parameters
- val (oldtparams, newtparams) = reskolemize(tparams)
+ val oldtparams = tparams map (_.symbol)
+ val newtparams = deriveFreshSkolems(oldtparams)
+ map2(tparams, newtparams)(_ setSymbol _)
// create fresh symbols for value parameters to hold the skolem types
- val vparamss1 = List(for (vdef <- vparamss.head; param = vdef.symbol) yield {
- ValDef(param cloneSymbol symbol substInfo (oldtparams, newtparams))
- })
+ val newSyms = cloneSymbolsAtOwnerAndModify(vparams map (_.symbol), symbol, _.substSym(oldtparams, newtparams))
// replace value and type parameters of the old method with the new ones
// log("Adding body for " + tree.symbol + " - origtparams: " + origtparams + "; tparams: " + tparams)
@@ -1564,14 +1575,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// log("Type env of: " + tree.symbol + ": " + boundTvars)
// log("newtparams: " + newtparams)
val symSubstituter = new ImplementationAdapter(
- parameters(source).flatten ::: origtparams,
- vparamss1.flatten.map(_.symbol) ::: newtparams,
+ parameters(source) ::: origtparams,
+ newSyms ::: newtparams,
source.enclClass,
false) // don't make private fields public
- val tmp = symSubstituter(body(source).duplicate)
+
+ val newBody = symSubstituter(body(source).duplicate)
tpt.tpe = tpt.tpe.substSym(oldtparams, newtparams)
- treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, tmp)
+ copyDefDef(tree)(vparamss = List(newSyms map ValDef), rhs = newBody)
}
/** Create trees for specialized members of 'sClass', based on the
@@ -1588,13 +1600,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if m.hasFlag(SPECIALIZED)
&& (m.sourceFile ne null)
&& satisfiable(typeEnv(m), !sClass.hasFlag(SPECIALIZED))) {
- log("creating tree for " + m.fullName)
+ debuglog("creating tree for " + m.fullName)
if (m.isMethod) {
if (info(m).target.hasAccessorFlag) hasSpecializedFields = true
if (m.isClassConstructor) {
- val origParamss = parameters(info(m).target)
+ val origParams = parameters(info(m).target)
val vparams = (
- map2(m.info.paramTypes, origParamss(0))((tp, sym) =>
+ map2(m.info.paramTypes, origParams)((tp, sym) =>
m.newValue(specializedName(sym, typeEnv(sClass)), sym.pos, sym.flags) setInfo tp
)
)
@@ -1644,7 +1656,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
buf +=
ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
.setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
- log("created synthetic class: " + specCls + " of " + sym1 + " in env: " + env)
+ debuglog("created synthetic class: " + specCls + " of " + sym1 + " in env: " + env)
}
case _ =>
}
@@ -1685,7 +1697,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* - there is a getter for the specialized field in the same class
*/
def initializesSpecializedField(f: Symbol) = (
- (f.name endsWith nme.SPECIALIZED_SUFFIX_NAME)
+ (f.name endsWith nme.SPECIALIZED_SUFFIX)
&& clazz.info.member(nme.originalName(f.name)).isPublic
&& clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol
)
@@ -1720,9 +1732,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
informProgress("specializing " + unit)
- override def transform(tree: Tree) =
- if (settings.nospecialization.value) tree
- else atPhase(phase.next)(specializeCalls(unit).transform(tree))
+ override def transform(tree: Tree) = {
+ val resultTree = if (settings.nospecialization.value) tree
+ else afterSpecialize(specializeCalls(unit).transform(tree))
+
+ // Remove the final modifier and @inline annotation from anything in the
+ // original class (since it's being overridden in at least one subclass).
+ //
+ // We do this here so that the specialized subclasses will correctly copy
+ // final and @inline.
+ info.foreach {
+ case (sym, SpecialOverload(target, _)) => {
+ sym.resetFlag(FINAL)
+ target.resetFlag(FINAL)
+ sym.removeAnnotation(ScalaInlineClass)
+ target.removeAnnotation(ScalaInlineClass)
+ }
+ case _ => {}
+ }
+
+ resultTree
+ }
}
def printSpecStats() {
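Note: the FINAL/@inline stripping in the transformer above is easiest to see at the source level. A minimal sketch, assuming hypothetical user code (not part of this patch): specialization generates a subclass that overrides the generic method with a forwarder to the specialized one, so a `final` modifier on the original would make that override illegal.

    // Hypothetical user code; illustrative only.
    class Vec[@specialized(Int) T](val x: T) {
      final def first: T = x
      // specialization emits Vec$mcI$sp, which overrides first with a
      // forwarder to the specialized variant -- hence FINAL must be
      // cleared on both the original symbol and its SpecialOverload target
    }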
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 1655ad09c4..fdb5c7e52e 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -204,7 +204,7 @@ abstract class TailCalls extends Transform {
fail(reason)
}
def rewriteTailCall(recv: Tree): Tree = {
- log("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
+ debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
ctx.accessed = true
typedPos(fun.pos)(Apply(Ident(ctx.label), recv :: transformArgs))
@@ -223,13 +223,29 @@ abstract class TailCalls extends Transform {
}
tree match {
- case dd @ DefDef(mods, name, tparams, vparams, tpt, rhs) =>
- val newCtx = new Context(dd)
+ case ValDef(_, _, _, _) =>
+ if (tree.symbol.isLazy && tree.symbol.hasAnnotation(TailrecClass))
+ unit.error(tree.pos, "lazy vals are not tailcall transformed")
+
+ super.transform(tree)
- debuglog("Considering " + name + " for tailcalls")
- val newRHS = transform(rhs, newCtx)
+ case dd @ DefDef(_, _, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
+ val newCtx = new Context(dd)
+ def isRecursiveCall(t: Tree) = {
+ val sym = t.symbol
+ (sym != null) && {
+ sym.isMethod && (dd.symbol.name == sym.name) && (dd.symbol.enclClass isSubClass sym.enclClass)
+ }
+ }
+ if (newCtx.isMandatory) {
+ if (!rhs0.exists(isRecursiveCall)) {
+ unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
+ }
+ }
+ debuglog("Considering " + dd.name + " for tailcalls")
+ val newRHS = transform(rhs0, newCtx)
- treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt, {
+ deriveDefDef(tree)(rhs =>
if (newCtx.isTransformed) {
/** We have rewritten the tree, but there may be nested recursive calls remaining.
* If @tailrec is given we need to fail those now.
@@ -241,7 +257,7 @@ abstract class TailCalls extends Transform {
}
}
val newThis = newCtx.newThis(tree.pos)
- val vpSyms = vparams.flatten map (_.symbol)
+ val vpSyms = vparamss0.flatten map (_.symbol)
typedPos(tree.pos)(Block(
List(ValDef(newThis, This(currentClass))),
@@ -249,12 +265,12 @@ abstract class TailCalls extends Transform {
))
}
else {
- if (newCtx.isMandatory)
+ if (newCtx.isMandatory && newRHS.exists(isRecursiveCall))
newCtx.tailrecFailure()
newRHS
}
- })
+ )
case Block(stats, expr) =>
treeCopy.Block(tree,
@@ -263,11 +279,7 @@ abstract class TailCalls extends Transform {
)
case CaseDef(pat, guard, body) =>
- treeCopy.CaseDef(tree,
- pat,
- guard,
- transform(body)
- )
+ deriveCaseDef(tree)(transform)
case If(cond, thenp, elsep) =>
treeCopy.If(tree,
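Note: the two new TailCalls checks above surface directly in user code. A minimal sketch, assuming user-level behaviour only (hypothetical example, not part of this patch):

    import scala.annotation.tailrec

    object TailrecDemo {
      @tailrec def broken(x: Int): Int = x + 1
      // rejected: "@tailrec annotated method contains no recursive calls"

      @tailrec def sum(xs: List[Int], acc: Int = 0): Int = xs match {
        case Nil    => acc
        case h :: t => sum(t, acc + h) // tail position: rewritten to a jump
      }
      // a @tailrec-annotated lazy val is likewise rejected:
      // "lazy vals are not tailcall transformed"
    }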
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index adbb7bc7f1..ee565530b7 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -129,7 +129,7 @@ abstract class UnCurry extends InfoTransform
appliedType(NonLocalReturnControlClass.typeConstructor, List(argtype))
/** A hashmap from method symbols to non-local return keys */
- private val nonLocalReturnKeys = new mutable.HashMap[Symbol, Symbol]
+ private val nonLocalReturnKeys = perRunCaches.newMap[Symbol, Symbol]()
/** Return non-local return key for given method */
private def nonLocalReturnKey(meth: Symbol) =
@@ -144,13 +144,13 @@ abstract class UnCurry extends InfoTransform
* todo: maybe clone a pre-existing exception instead?
* (but what to do about exceptions that miss their targets?)
*/
- private def nonLocalReturnThrow(expr: Tree, meth: Symbol) =
- localTyper.typed {
- Throw(
- New(
- TypeTree(nonLocalReturnExceptionType(expr.tpe)),
- List(List(Ident(nonLocalReturnKey(meth)), expr))))
- }
+ private def nonLocalReturnThrow(expr: Tree, meth: Symbol) = localTyper typed {
+ Throw(
+ nonLocalReturnExceptionType(expr.tpe.widen),
+ Ident(nonLocalReturnKey(meth)),
+ expr
+ )
+ }
/** Transform (body, key) to:
*
@@ -166,31 +166,18 @@ abstract class UnCurry extends InfoTransform
* }
*/
private def nonLocalReturnTry(body: Tree, key: Symbol, meth: Symbol) = {
- localTyper.typed {
- val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType)
- val ex = meth.newValue(nme.ex, body.pos) setInfo extpe
- val pat = Bind(ex,
- Typed(Ident(nme.WILDCARD),
- AppliedTypeTree(Ident(NonLocalReturnControlClass),
- List(Bind(tpnme.WILDCARD,
- EmptyTree)))))
- val rhs =
- If(
- Apply(
- Select(
- Apply(Select(Ident(ex), "key"), List()),
- Object_eq),
- List(Ident(key))),
- Apply(
- TypeApply(
- Select(
- Apply(Select(Ident(ex), "value"), List()),
- Any_asInstanceOf),
- List(TypeTree(meth.tpe.finalResultType))),
- List()),
- Throw(Ident(ex)))
- val keyDef = ValDef(key, New(TypeTree(ObjectClass.tpe), List(List())))
- val tryCatch = Try(body, List(CaseDef(pat, EmptyTree, rhs)), EmptyTree)
+ localTyper typed {
+ val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType)
+ val ex = meth.newValue(body.pos, nme.ex) setInfo extpe
+ val pat = gen.mkBindForCase(ex, NonLocalReturnControlClass, List(meth.tpe.finalResultType))
+ val rhs = (
+ IF ((ex DOT nme.key)() OBJ_EQ Ident(key))
+ THEN ((ex DOT nme.value)())
+ ELSE (Throw(Ident(ex)))
+ )
+ val keyDef = ValDef(key, New(ObjectClass.tpe))
+ val tryCatch = Try(body, pat -> rhs)
+
Block(List(keyDef), tryCatch)
}
}
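Note: the construct that exercises nonLocalReturnThrow/nonLocalReturnTry is a return from inside a closure. A minimal sketch (hypothetical user code, not part of this patch):

    object NonLocalDemo {
      def firstPositive(xs: List[Int]): Int = {
        xs foreach { x =>
          if (x > 0) return x // non-local: compiled into a throw of
        }                     // NonLocalReturnControl, caught by the
        -1                    // try/catch built by nonLocalReturnTry
      }
    }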
@@ -228,9 +215,9 @@ abstract class UnCurry extends InfoTransform
* case P_1 if G_1 => E_1
* ...
* case P_n if G_n => true
- * case _ => this.missingCase(x)
+ * case _ => this.missingCase(expr)
* }
- * def isDefinedAtCurrent(x: T): boolean = (x: @unchecked) match {
+ * def _isDefinedAt(x: T): boolean = (x: @unchecked) match {
* case P_1 if G_1 => true
* ...
* case P_n if G_n => true
@@ -240,7 +227,7 @@ abstract class UnCurry extends InfoTransform
* new $anon()
*
* However, if one of the patterns P_i if G_i is a default pattern,
- * drop the last default clause in tghe definition of `apply` and generate for `isDefinedAtCurrent` instead
+ * drop the last default clause in the definition of `apply` and generate for `_isDefinedAt` instead
*
* def isDefinedAtCurrent(x: T): boolean = true
*/
@@ -260,7 +247,7 @@ abstract class UnCurry extends InfoTransform
else List(ObjectClass.tpe, fun.tpe, SerializableClass.tpe)
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
- val applyMethod = anonClass.newMethod(nme.apply, fun.pos, FINAL)
+ val applyMethod = anonClass.newMethod(nme.apply, fun.pos, FINAL)
applyMethod setInfoAndEnter MethodType(applyMethod newSyntheticValueParams formals, restpe)
anonClass addAnnotation serialVersionUIDAnnotation
@@ -291,73 +278,26 @@ abstract class UnCurry extends InfoTransform
val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
def substTree[T <: Tree](t: T): T = substParam(resetLocalAttrs(t))
- // waiting here until we can mix case classes and extractors reliably (i.e., when virtpatmat becomes the default)
- // object VirtPatmatOpt {
- // object Last {
- // def unapply[T](xs: List[T]) = xs.lastOption
- // }
- // // keep this in synch by what's generated by combineCases/runOrElse
- // object MatcherBlock {
- // def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree])] = matcher match { // TODO: BUG the unapplySeq version of the case below does not seem to work in virtpatmat??
- // case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) => Some(zero, x, matchRes, keepGoing, stats)
- // case _ => None
- // }
- // }
- // // TODO: virtpatmat use case: would be nice if could abstract over the repeated pattern more easily
- // // case Block(Last(P)) =>
- // // case P =>
- // def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree], Tree => Tree)] = matcher match {
- // case MatcherBlock(zero, x, matchRes, keepGoing, stats) => Some(zero, x, matchRes, keepGoing, stats, identity[Tree])
- // case Block(outerStats, MatcherBlock(zero, x, matchRes, keepGoing, stats)) => Some(zero, x, matchRes, keepGoing, stats, inner => Block(outerStats, inner))
- // case b => treeBrowser browse b; None
- // }
- // }
-
- // TODO: optimize duplication, but make sure ValDef's introduced by wrap are treated correctly
- def dupMatch(selector: Tree, cases: List[CaseDef], wrap: Match => Tree = identity) = {
- def transformCase(cdef: CaseDef): CaseDef =
- CaseDef(cdef.pat, cdef.guard, Literal(Constant(true)))
- def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
-
- gen.mkUncheckedMatch(
- if (cases exists treeInfo.isDefaultCase) Literal(Constant(true))
- else substTree(wrap(Match(selector, (cases map transformCase) :+ defaultCase)).duplicate)
- )
- }
+ object isDefinedAtTransformer extends gen.MatchMatcher {
+ // TODO: optimize duplication, but make sure ValDef's introduced by wrap are treated correctly
+ override def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = {
+ def transformCase(cdef: CaseDef): CaseDef =
+ CaseDef(cdef.pat, cdef.guard, Literal(Constant(true)))
- def dupVirtMatch(zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], wrap: Block => Tree = identity) = {
- object dropMatchResAssign extends Transformer {
- // override val treeCopy = newStrictTreeCopier // will duplicate below
- override def transform(tree: Tree): Tree = tree match {
- // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing
- case Block(List(matchRes, ass@Assign(keepGoingLhs, falseLit)), zero) if keepGoingLhs.symbol eq keepGoing.symbol =>
- Block(List(ass), zero)
- case _ =>
- super.transform(tree)
- }
+ def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
+
+ val casesNoSynthCatchAll = dropSyntheticCatchAll(cases)
+
+ gen.mkUncheckedMatch(
+ if (casesNoSynthCatchAll exists treeInfo.isDefaultCase) Literal(Constant(true))
+ else substTree(wrap(Match(selector, (casesNoSynthCatchAll map transformCase) :+ defaultCase)).duplicate)
+ )
}
- val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList
- val idaBlock = wrap(Block(
- zero ::
- x ::
- /* drop matchRes def */
- keepGoing ::
- statsNoMatchRes,
- NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) else matchRes` by `!keepGoing`
- ))
- substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed
- }
- DefDef(m, (fun.body: @unchecked) match {
- case Match(selector, cases) =>
- dupMatch(selector, cases)
- case Block((vd: ValDef) :: Nil, Match(selector, cases)) => // can't factor this out using an extractor due to bugs in the old pattern matcher
- dupMatch(selector, cases, m => Block(List(vd), m))
- // virtpatmat -- TODO: find a better way to keep this in synch with the code generated by patmatvirtualizer
- case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), args_scrut), args_pm) if opt.virtPatmat =>
+ override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = {
object noOne extends Transformer {
override val treeCopy = newStrictTreeCopier // must duplicate everything
- val one = tgt.tpe member newTermName("one")
+ val one = _match.tpe member newTermName("one")
override def transform(tree: Tree): Tree = tree match {
case Apply(fun, List(a)) if fun.symbol == one =>
// blow one's argument away since all we want to know is whether the match succeeds or not
@@ -367,15 +307,34 @@ abstract class UnCurry extends InfoTransform
super.transform(tree)
}
}
- substTree(Apply(Apply(TypeApply(Select(tgt.duplicate, tgt.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), args_scrut map (_.duplicate)), args_pm map (noOne.transform)))
- // for the optimized version of virtpatmat
- case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) if opt.virtPatmat =>
- dupVirtMatch(zero, x, matchRes, keepGoing, stats)
- case Block(outerStats, Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _)) if opt.virtPatmat => // can't factor this out using an extractor due to bugs in the old pattern matcher
- dupVirtMatch(zero, x, matchRes, keepGoing, stats, m => Block(outerStats, m))
- // case other =>
- // treeBrowser browse other
- })
+ substTree(Apply(Apply(TypeApply(Select(_match.duplicate, _match.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), List(scrut.duplicate)), List(noOne.transform(matcher))))
+ }
+
+ override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree) = {
+ object dropMatchResAssign extends Transformer {
+ // override val treeCopy = newStrictTreeCopier // will duplicate below
+ override def transform(tree: Tree): Tree = tree match {
+ // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing
+ case gen.VirtualCaseDef(assignKeepGoing, matchRes, zero) if assignKeepGoing.lhs.symbol eq keepGoing.symbol =>
+ Block(List(assignKeepGoing), zero)
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList
+ val idaBlock = wrap(Block(
+ zero ::
+ x ::
+ /* drop matchRes def */
+ keepGoing ::
+ statsNoMatchRes,
+ NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) else matchRes` epilogue by `!keepGoing`
+ ))
+ substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed
+ }
+ }
+
+ DefDef(m, isDefinedAtTransformer(fun.body))
}
val members =
@@ -385,9 +344,7 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
List(ClassDef(anonClass, NoMods, List(List()), List(List()), members, fun.pos)),
- Typed(
- New(TypeTree(anonClass.tpe), List(List())),
- TypeTree(fun.tpe)))
+ Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
}
}
@@ -400,7 +357,7 @@ abstract class UnCurry extends InfoTransform
// when calling into scala varargs, make sure it's a sequence.
def arrayToSequence(tree: Tree, elemtp: Type) = {
- atPhase(phase.next) {
+ afterUncurry {
localTyper.typedPos(pos) {
val pt = arrayType(elemtp)
val adaptedTree = // might need to cast to Array[elemtp], as arrays are not covariant
@@ -424,7 +381,7 @@ abstract class UnCurry extends InfoTransform
else if (tp.bounds.hi ne tp) getManifest(tp.bounds.hi)
else localTyper.getManifestTree(tree, tp, false)
}
- atPhase(phase.next) {
+ afterUncurry {
localTyper.typedPos(pos) {
Apply(gen.mkAttributedSelect(tree, toArraySym),
List(getManifest(tree.tpe.baseType(TraversableClass).typeArgs.head)))
@@ -449,7 +406,7 @@ abstract class UnCurry extends InfoTransform
else arrayToSequence(mkArray, varargsElemType)
}
- atPhase(phase.next) {
+ afterUncurry {
if (isJava && isPrimitiveArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
suffix = localTyper.typedPos(pos) {
gen.mkRuntimeCall(nme.toObjectArray, List(suffix))
@@ -482,19 +439,38 @@ abstract class UnCurry extends InfoTransform
}
}
- /** For removing calls to specially designated methods.
+ /** Called if a tree's symbol is elidable. If it's a DefDef,
+ * replace only the body/rhs with 0/false/()/null; otherwise replace
+ * the whole tree with that zero value.
*/
- def elideIntoUnit(tree: Tree): Tree = Literal(Constant()) setPos tree.pos setType UnitClass.tpe
- def isElidable(tree: Tree) = {
- val sym = treeInfo.methPart(tree).symbol
- // XXX settings.noassertions.value temporarily retained to avoid
- // breakage until a reasonable interface is settled upon.
- sym != null && sym.elisionLevel.exists(x => x < settings.elidebelow.value || settings.noassertions.value) && {
- log("Eliding call from " + tree.symbol.owner + " to " + sym + " based on its elision threshold of " + sym.elisionLevel.get)
- true
+ private def replaceElidableTree(tree: Tree): Tree = {
+ tree match {
+ case DefDef(_,_,_,_,_,_) =>
+ deriveDefDef(tree)(rhs => Block(Nil, gen.mkZero(rhs.tpe)) setType rhs.tpe) setSymbol tree.symbol setType tree.tpe
+ case _ =>
+ gen.mkZero(tree.tpe) setType tree.tpe
}
}
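Note: replaceElidableTree is driven by @elidable and -Xelide-below. A minimal sketch (hypothetical user code, not part of this patch):

    import scala.annotation.elidable
    import scala.annotation.elidable._

    object Log {
      @elidable(FINE) def trace(msg: String): Unit = println(msg)
    }
    // compiled with -Xelide-below INFO, the body of trace is replaced
    // by the unit value, and calls to it are replaced by their zero
    // (here ()), as described in the comment above.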
+ private def isSelfSynchronized(ddef: DefDef) = ddef.rhs match {
+ case Apply(fn @ TypeApply(Select(sel, _), _), _) =>
+ fn.symbol == Object_synchronized && sel.symbol == ddef.symbol.enclClass && !ddef.symbol.enclClass.isTrait
+ case _ => false
+ }
+
+ /** If an eligible method is entirely wrapped in a call to synchronized
+ * locked on the same instance, remove the synchronized scaffolding and
+ * mark the method symbol SYNCHRONIZED for bytecode generation.
+ */
+ private def translateSynchronized(tree: Tree) = tree match {
+ case dd @ DefDef(_, _, _, _, _, Apply(fn, body :: Nil)) if isSelfSynchronized(dd) =>
+ log("Translating " + dd.symbol.defString + " into synchronized method")
+ dd.symbol setFlag SYNCHRONIZED
+ deriveDefDef(dd)(_ => body)
+ case _ => tree
+ }
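Note: translateSynchronized rewrites a method whose whole body is this.synchronized { ... } into a JVM-synchronized method. A minimal sketch (hypothetical user code, not part of this patch):

    class Counter {
      private var n = 0
      def next(): Int = synchronized { n += 1; n }
      // rewritten: the body becomes { n += 1; n } and the method symbol
      // is flagged SYNCHRONIZED, so the backend emits ACC_SYNCHRONIZED
      // instead of explicit monitorenter/monitorexit scaffolding
    }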
+ def isNonLocalReturn(ret: Return) = ret.symbol != currentOwner.enclMethod || currentOwner.isLazy
+
// ------ The tree transformers --------------------------------------------------------
def mainTransform(tree: Tree): Tree = {
@@ -532,112 +508,103 @@ abstract class UnCurry extends InfoTransform
finally this.inConstructorFlag = saved
}
- if (isElidable(tree)) elideIntoUnit(tree)
- else tree match {
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
- withNeedLift(false) {
- if (tree.symbol.isClassConstructor) {
- atOwner(tree.symbol) {
- val rhs1 = (rhs: @unchecked) match {
- case Block(stats, expr) =>
- def transformInConstructor(stat: Tree) =
- withInConstructorFlag(INCONSTRUCTOR) { transform(stat) }
- val presupers = treeInfo.preSuperFields(stats) map transformInConstructor
- val rest = stats drop presupers.length
- val supercalls = rest take 1 map transformInConstructor
- val others = rest drop 1 map transform
- treeCopy.Block(rhs, presupers ::: supercalls ::: others, transform(expr))
+ val sym = tree.symbol
+ val result = (
+ // TODO - settings.noassertions.value temporarily retained to avoid
+ // breakage until a reasonable interface is settled upon.
+ if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions.value)))
+ replaceElidableTree(tree)
+ else translateSynchronized(tree) match {
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
+
+ withNeedLift(false) {
+ if (dd.symbol.isClassConstructor) {
+ atOwner(sym) {
+ val rhs1 = (rhs: @unchecked) match {
+ case Block(stats, expr) =>
+ def transformInConstructor(stat: Tree) =
+ withInConstructorFlag(INCONSTRUCTOR) { transform(stat) }
+ val presupers = treeInfo.preSuperFields(stats) map transformInConstructor
+ val rest = stats drop presupers.length
+ val supercalls = rest take 1 map transformInConstructor
+ val others = rest drop 1 map transform
+ treeCopy.Block(rhs, presupers ::: supercalls ::: others, transform(expr))
+ }
+ treeCopy.DefDef(
+ dd, mods, name, transformTypeDefs(tparams),
+ transformValDefss(vparamss), transform(tpt), rhs1)
}
- treeCopy.DefDef(
- tree, mods, name, transformTypeDefs(tparams),
- transformValDefss(vparamss), transform(tpt), rhs1)
+ } else {
+ super.transform(dd)
}
- } else {
- super.transform(tree)
}
- }
- case ValDef(_, _, _, rhs) =>
- val sym = tree.symbol
- if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
- // a local variable that is mutable and free somewhere later should be lifted
- // as lambda lifting (coming later) will wrap 'rhs' in an Ref object.
- if (!sym.owner.isSourceMethod)
+ case ValDef(_, _, _, rhs) =>
+ if (sym eq NoSymbol) throw new IllegalStateException("Encountered ValDef without symbol: "+ tree + " in "+ unit)
+ // a local variable that is mutable and free somewhere later should be lifted
+ // as lambda lifting (coming later) will wrap 'rhs' in an Ref object.
+ if (!sym.owner.isSourceMethod)
+ withNeedLift(true) { super.transform(tree) }
+ else
+ super.transform(tree)
+ case UnApply(fn, args) =>
+ val fn1 = withInPattern(false)(transform(fn))
+ val args1 = transformTrees(fn.symbol.name match {
+ case nme.unapply => args
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeListFromReturnTypeSeq(fn.tpe))
+ case _ => sys.error("internal error: UnApply node has wrong symbol")
+ })
+ treeCopy.UnApply(tree, fn1, args1)
+
+ case Apply(fn, args) =>
+ if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
+ transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
+ else
+ withNeedLift(true) {
+ val formals = fn.tpe.paramTypes
+ treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
+ }
+
+ case Assign(Select(_, _), _) =>
withNeedLift(true) { super.transform(tree) }
- else
- super.transform(tree)
-/*
- case Apply(Select(Block(List(), Function(vparams, body)), nme.apply), args) =>
- // perform beta-reduction; this helps keep view applications small
- println("beta-reduce1: "+tree)
- withNeedLift(true) {
- mainTransform(new TreeSubstituter(vparams map (_.symbol), args).transform(body))
- }
- case Apply(Select(Function(vparams, body), nme.apply), args) =>
-// if (List.forall2(vparams, args)((vparam, arg) => treeInfo.isAffineIn(body) ||
-// treeInfo.isExprSafeToInline(arg))) =>
- // perform beta-reduction; this helps keep view applications small
- println("beta-reduce2: "+tree)
- withNeedLift(true) {
- mainTransform(new TreeSubstituter(vparams map (_.symbol), args).transform(body))
- }
-*/
- case UnApply(fn, args) =>
- val fn1 = withInPattern(false)(transform(fn))
- val args1 = transformTrees(fn.symbol.name match {
- case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeListFromReturnTypeSeq(fn.tpe))
- case _ => sys.error("internal error: UnApply node has wrong symbol")
- })
- treeCopy.UnApply(tree, fn1, args1)
-
- case Apply(fn, args) =>
- if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
- transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
- else
- withNeedLift(true) {
- val formals = fn.tpe.paramTypes
- treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
- }
+ case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
+ withNeedLift(true) { super.transform(tree) }
- case Assign(Select(_, _), _) =>
- withNeedLift(true) { super.transform(tree) }
+ case ret @ Return(_) if (isNonLocalReturn(ret)) =>
+ withNeedLift(true) { super.transform(ret) }
- case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
- withNeedLift(true) { super.transform(tree) }
+ case Try(block, catches, finalizer) =>
+ if (needTryLift || shouldBeLiftedAnyway(tree)) transform(liftTree(tree))
+ else super.transform(tree)
- case Try(block, catches, finalizer) =>
- if (needTryLift || shouldBeLiftedAnyway(tree)) transform(liftTree(tree))
- else super.transform(tree)
+ case CaseDef(pat, guard, body) =>
+ val pat1 = withInPattern(true)(transform(pat))
+ treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
- case CaseDef(pat, guard, body) =>
- val pat1 = withInPattern(true)(transform(pat))
- treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
+ case fun @ Function(_, _) =>
+ mainTransform(transformFunction(fun))
- case fun @ Function(_, _) =>
- mainTransform(transformFunction(fun))
+ case Template(_, _, _) =>
+ withInConstructorFlag(0) { super.transform(tree) }
- case Template(_, _, _) =>
- withInConstructorFlag(0) { super.transform(tree) }
-
- case _ =>
- val tree1 = super.transform(tree)
- if (isByNameRef(tree1)) {
- val tree2 = tree1 setType functionType(Nil, tree1.tpe)
- return {
- if (noApply contains tree2) tree2
- else localTyper.typedPos(tree1.pos)(Apply(Select(tree2, nme.apply), Nil))
+ case _ =>
+ val tree1 = super.transform(tree)
+ if (isByNameRef(tree1)) {
+ val tree2 = tree1 setType functionType(Nil, tree1.tpe)
+ return {
+ if (noApply contains tree2) tree2
+ else localTyper.typedPos(tree1.pos)(Apply(Select(tree2, nme.apply), Nil))
+ }
}
- }
- tree1
- }
- } setType {
- assert(tree.tpe != null, "tpe is null at " + tree.pos + " for " + tree.summaryString + " / " + tree)
- uncurryTreeType(tree.tpe)
+ tree1
+ }
+ )
+ assert(result.tpe != null, result + " tpe is null")
+ result setType uncurryTreeType(result.tpe)
}
- def postTransform(tree: Tree): Tree = atPhase(phase.next) {
+ def postTransform(tree: Tree): Tree = afterUncurry {
def applyUnary(): Tree = {
// TODO_NMT: verify that the inner tree of a type-apply also gets parens if the
// whole tree is a polymorphic nullary method application
@@ -663,23 +630,24 @@ abstract class UnCurry extends InfoTransform
* In particular, this case will add:
* - synthetic Java varargs forwarders for repeated parameters
*/
- case Template(parents, self, body) =>
+ case Template(_, _, _) =>
localTyper = typer.atOwner(tree, currentClass)
- val tmpl = if (!forMSIL || forMSIL) {
- treeCopy.Template(tree, parents, self, transformTrees(newMembers.toList) ::: body)
- } else super.transform(tree).asInstanceOf[Template]
- newMembers.clear
- tmpl
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- val rhs1 = nonLocalReturnKeys.get(tree.symbol) match {
- case None => rhs
- case Some(k) => atPos(rhs.pos)(nonLocalReturnTry(rhs, k, tree.symbol))
- }
- val flatdd = treeCopy.DefDef(tree, mods, name, tparams, List(vparamss.flatten), tpt, rhs1)
- if (dd.symbol hasAnnotation VarargsClass) addJavaVarargsForwarders(dd, flatdd, tree)
- flatdd
+ try deriveTemplate(tree)(transformTrees(newMembers.toList) ::: _)
+ finally newMembers.clear()
+
+ case dd @ DefDef(_, _, _, vparamss0, _, rhs0) =>
+ val flatdd = copyDefDef(dd)(
+ vparamss = List(vparamss0.flatten),
+ rhs = nonLocalReturnKeys get dd.symbol match {
+ case Some(k) => atPos(rhs0.pos)(nonLocalReturnTry(rhs0, k, dd.symbol))
+ case None => rhs0
+ }
+ )
+ addJavaVarargsForwarders(dd, flatdd)
+
case Try(body, catches, finalizer) =>
- if (catches forall treeInfo.isCatchCase) tree
+ if (opt.virtPatmat) { if (catches exists (cd => !treeInfo.isCatchCase(cd))) debugwarn("VPM BUG! illegal try/catch "+ catches); tree }
+ else if (catches forall treeInfo.isCatchCase) tree
else {
val exname = unit.freshTermName("ex$")
val cases =
@@ -701,7 +669,7 @@ abstract class UnCurry extends InfoTransform
}
debuglog("rewrote try: " + catches + " ==> " + catchall);
val catches1 = localTyper.typedCases(
- tree, List(catchall), ThrowableClass.tpe, WildcardType)
+ List(catchall), ThrowableClass.tpe, WildcardType)
treeCopy.Try(tree, body, catches1, finalizer)
}
case Apply(Apply(fn, args), args1) =>
@@ -711,9 +679,9 @@ abstract class UnCurry extends InfoTransform
applyUnary()
case Select(_, _) | TypeApply(_, _) =>
applyUnary()
- case Return(expr) if (tree.symbol != currentOwner.enclMethod || currentOwner.isLazy) =>
- debuglog("non local return in "+tree.symbol+" from "+currentOwner.enclMethod)
- atPos(tree.pos)(nonLocalReturnThrow(expr, tree.symbol))
+ case ret @ Return(expr) if (isNonLocalReturn(ret)) =>
+ debuglog("non local return in "+ret.symbol+" from "+currentOwner.enclMethod)
+ atPos(ret.pos)(nonLocalReturnThrow(expr, ret.symbol))
case TypeTree() =>
tree
case _ =>
@@ -739,9 +707,9 @@ abstract class UnCurry extends InfoTransform
* It looks for the method in the `repeatedParams` map, and generates a Java-style
* varargs forwarder. It then adds the forwarder to the `newMembers` sequence.
*/
- private def addJavaVarargsForwarders(dd: DefDef, flatdd: DefDef, tree: Tree): Unit = {
- if (!repeatedParams.contains(dd.symbol))
- return
+ private def addJavaVarargsForwarders(dd: DefDef, flatdd: DefDef): DefDef = {
+ if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol))
+ return flatdd
def toSeqType(tp: Type): Type = {
val arg = elementType(ArrayClass, tp)
@@ -762,7 +730,7 @@ abstract class UnCurry extends InfoTransform
val reps = repeatedParams(dd.symbol)
val rpsymbols = reps.map(_.symbol).toSet
- val theTyper = typer.atOwner(tree, currentClass)
+ val theTyper = typer.atOwner(dd, currentClass)
val flatparams = flatdd.vparamss.head
// create the type
@@ -814,10 +782,11 @@ abstract class UnCurry extends InfoTransform
case None =>
// enter symbol into scope
currentClass.info.decls enter forwsym
-
// add the method to `newMembers`
newMembers += forwtree
}
+
+ flatdd
}
}
}
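Note: addJavaVarargsForwarders is what makes @varargs methods callable from Java with variable arguments. A minimal sketch (hypothetical user code, not part of this patch):

    import scala.annotation.varargs

    class Util {
      @varargs def sum(xs: Int*): Int = xs.sum
      // a Java-style forwarder is synthesized alongside, roughly
      //   def sum(xs: Array[Int]): Int = sum(xs: _*)
      // so Java callers can write util.sum(1, 2, 3)
    }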
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 68bc80ffc4..b7a22c6ac1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -31,14 +31,14 @@ trait ContextErrors {
case class NormalTypeError(underlyingTree: Tree, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
extends AbsTypeError {
-
+
def errPos:Position = underlyingTree.pos
override def toString() = "[Type error at:" + underlyingTree.pos + "] " + errMsg
}
case class SymbolTypeError(underlyingSym: Symbol, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
extends AbsTypeError {
-
+
def errPos = underlyingSym.pos
}
@@ -76,7 +76,7 @@ trait ContextErrors {
}
def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) }
-
+
def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
"type mismatch" + foundReqMsg(found, req) + missingArgsMsg
@@ -143,12 +143,12 @@ trait ContextErrors {
found
}
assert(!found.isErroneous && !req.isErroneous, (found, req))
-
+
issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) )
if (settings.explaintypes.value)
explainTypes(found, req)
}
-
+
def WithFilterError(tree: Tree, ex: AbsTypeError) = {
issueTypeError(ex)
setError(tree)
@@ -177,13 +177,13 @@ trait ContextErrors {
val calcSimilar = (
name.length > 2 && (
startingIdentCx.reportErrors
- || startingIdentCx.enclClassOrMethod.reportErrors
+ || startingIdentCx.enclClassOrMethod.reportErrors
)
)
- // avoid calculating if we're in "silent" mode.
- // name length check to limit unhelpful suggestions for e.g. "x" and "b1"
+ // avoid calculating if we're in "silent" mode.
+ // name length check to limit unhelpful suggestions for e.g. "x" and "b1"
val similar = {
- if (!calcSimilar) ""
+ if (!calcSimilar) ""
else {
val allowed = (
startingIdentCx.enclosingContextChain
@@ -624,11 +624,21 @@ trait ContextErrors {
setError(tree)
}
- // checkNoDoubleDefs...
- def DefDefinedTwiceError(sym0: Symbol, sym1: Symbol) =
- issueSymbolTypeError(sym0, sym1+" is defined twice"+
- {if(!settings.debug.value) "" else " in "+context0.unit}+
- {if (sym0.isMacro && sym1.isMacro) " \n(note that macros cannot be overloaded)" else ""})
+ // checkNoDoubleDefs...
+ // @PP: I hacked the filename in (context0.unit) to work around SI-4893. It would be
+ // much better if every symbol could offer some idea of where it came from, else
+ // the obviously untrue claim that something has been defined twice can only frustrate.
+ // There's no direct test because partest doesn't work, but to reproduce, separately
+ // compile the next two lines:
+ // package object foo { val x: Class[_] = null }
+ // package foo
+ def DefDefinedTwiceError(sym0: Symbol, sym1: Symbol) = {
+ val isBug = sym0.isAbstractType && sym1.isAbstractType && (sym0.name startsWith "_$")
+ issueSymbolTypeError(sym0, sym1+" is defined twice in " + context0.unit
+ + ( if (sym0.isMacro && sym1.isMacro) "\n(note that macros cannot be overloaded)" else "" )
+ + ( if (isBug) "\n(this error is likely due to a bug in the scala compiler involving wildcards in package objects)" else "" )
+ )
+ }
// cyclic errors
def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) =
@@ -636,11 +646,6 @@ trait ContextErrors {
def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
-
- def MacroExpandError(tree: Tree, t: Any) = {
- issueNormalTypeError(tree, "macros must return a compiler-specific tree; returned class is: " + t.getClass)
- setError(tree)
- }
}
}
@@ -667,7 +672,7 @@ trait ContextErrors {
type ErrorType = Value
val WrongNumber, NoParams, ArgsDoNotConform = Value
}
-
+
private def ambiguousErrorMsgPos(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) =
if (sym1.hasDefaultFlag && sym2.hasDefaultFlag && sym1.enclClass == sym2.enclClass) {
val methodName = nme.defaultGetterToMethod(sym1.name)
@@ -714,9 +719,15 @@ trait ContextErrors {
setError(tree)
}
- def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) =
+ def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = {
issueNormalTypeError(tree,
applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
+ // since inferMethodAlternative modifies the state of the tree
+ // we have to set the type of tree to ErrorType only in the very last
+ // fallback action that is done in the inference (tracking it manually is error prone).
+ // This avoids entering an infinite loop in doTypeApply.
+ if (implicitly[Context].reportErrors) setError(tree)
+ }
def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol,
firstCompeting: Symbol, argtpes: List[Type], pt: Type) = {
@@ -724,6 +735,8 @@ trait ContextErrors {
"argument types " + argtpes.mkString("(", ",", ")") +
(if (pt == WildcardType) "" else " and expected result type " + pt)
val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
+ // discover last attempt in a similar way as for NoBestMethodAlternativeError
+ if (implicitly[Context].ambiguousErrors) setError(tree)
issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
}
@@ -830,14 +843,14 @@ trait ContextErrors {
object NamerErrorGen {
implicit val context0 = context
-
+
object SymValidateErrors extends Enumeration {
val ImplicitConstr, ImplicitNotTerm, ImplicitTopObject,
OverrideClass, SealedNonClass, AbstractNonClass,
OverrideConstr, AbstractOverride, LazyAndEarlyInit,
ByNameParameter, AbstractVar = Value
}
-
+
object DuplicatesErrorKinds extends Enumeration {
val RenamedTwice, AppearsTwice = Value
}
@@ -845,7 +858,7 @@ trait ContextErrors {
import SymValidateErrors._
import DuplicatesErrorKinds._
import symtab.Flags
-
+
def TypeSigError(tree: Tree, ex: TypeError) = {
ex match {
case CyclicReference(sym, info: TypeCompleter) =>
@@ -854,7 +867,7 @@ trait ContextErrors {
context0.issue(TypeErrorWithUnderlyingTree(tree, ex))
}
}
-
+
def GetterDefinedTwiceError(getter: Symbol) =
issueSymbolTypeError(getter, getter+" is defined twice")
@@ -897,37 +910,37 @@ trait ContextErrors {
val msg = errKind match {
case ImplicitConstr =>
"`implicit' modifier not allowed for constructors"
-
+
case ImplicitNotTerm =>
"`implicit' modifier can be used only for values, variables and methods"
-
+
case ImplicitTopObject =>
"`implicit' modifier cannot be used for top-level objects"
-
+
case OverrideClass =>
"`override' modifier not allowed for classes"
-
+
case SealedNonClass =>
"`sealed' modifier can be used only for classes"
-
+
case AbstractNonClass =>
"`abstract' modifier can be used only for classes; it should be omitted for abstract members"
-
+
case OverrideConstr =>
"`override' modifier not allowed for constructors"
-
+
case AbstractOverride =>
"`abstract override' modifier only allowed for members of traits"
-
+
case LazyAndEarlyInit =>
"`lazy' definitions may not be initialized early"
-
+
case ByNameParameter =>
"pass-by-name arguments not allowed for case class parameters"
-
+
case AbstractVar =>
"only classes can have declared but undefined members" + abstractVarMessage(sym)
-
+
}
issueSymbolTypeError(sym, msg)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index b5afd681d2..a1ba8a2982 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -21,7 +21,7 @@ trait Contexts { self: Analyzer =>
outer = this
enclClass = this
enclMethod = this
-
+
override def nextEnclosing(p: Context => Boolean): Context = this
override def enclosingContextChain: List[Context] = Nil
override def implicitss: List[List[ImplicitInfo]] = Nil
@@ -128,6 +128,8 @@ trait Contexts { self: Analyzer =>
var typingIndentLevel: Int = 0
def typingIndent = " " * typingIndentLevel
+ var buffer: Set[AbsTypeError] = _
+
def enclClassOrMethod: Context =
if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
else outer.enclClassOrMethod
@@ -145,7 +147,6 @@ trait Contexts { self: Analyzer =>
}
private[this] var mode = 0
- private[this] val buffer = LinkedHashSet[AbsTypeError]()
def errBuffer = buffer
def hasErrors = buffer.nonEmpty
@@ -160,7 +161,7 @@ trait Contexts { self: Analyzer =>
def setReportErrors() = mode = (ReportErrors | AmbiguousErrors)
def setBufferErrors() = {
- assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
+ //assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
mode = BufferErrors
}
def setThrowErrors() = mode &= (~AllMask)
@@ -177,7 +178,7 @@ trait Contexts { self: Analyzer =>
buffer.clear()
current
}
-
+
def logError(err: AbsTypeError) = buffer += err
def withImplicitsDisabled[T](op: => T): T = {
@@ -225,6 +226,7 @@ trait Contexts { self: Analyzer =>
c.checking = this.checking
c.retyping = this.retyping
c.openImplicits = this.openImplicits
+ c.buffer = if (this.buffer == null) LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
c
@@ -237,7 +239,7 @@ trait Contexts { self: Analyzer =>
c.implicitsEnabled = true
c
}
-
+
def makeNewImport(sym: Symbol): Context =
makeNewImport(gen.mkWildcardImport(sym))
@@ -265,6 +267,7 @@ trait Contexts { self: Analyzer =>
val c = make(newtree)
c.setBufferErrors()
c.setAmbiguousErrors(reportAmbiguousErrors)
+ c.buffer = new LinkedHashSet[AbsTypeError]()
c
}
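Note: taken together, the two initialization sites above mean a child context aliases its parent's error buffer, while the silent context always starts a fresh one. A schematic sketch of the sharing rule, assuming simplified hypothetical types (not the real Context class):

    import scala.collection.mutable.LinkedHashSet

    class Ctx(parent: Ctx) {
      // alias the parent's buffer when it exists, else allocate lazily
      var buffer: LinkedHashSet[String] =
        if (parent == null || parent.buffer == null) LinkedHashSet[String]()
        else parent.buffer
      // silent contexts buffer locally so trial typing can be discarded
      def makeSilent(): Ctx = {
        val c = new Ctx(this)
        c.buffer = LinkedHashSet[String]()
        c
      }
    }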
@@ -308,12 +311,14 @@ trait Contexts { self: Analyzer =>
unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
def issue(err: AbsTypeError) {
+ debugwarn("issue error: " + err.errMsg)
if (reportErrors) unitError(err.errPos, addDiagString(err.errMsg))
else if (bufferErrors) { buffer += err }
else throw new TypeError(err.errPos, err.errMsg)
}
def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) {
+ debugwarn("issue ambiguous error: " + err.errMsg)
if (ambiguousErrors) {
if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
unitError(err.errPos, err.errMsg)
@@ -322,6 +327,7 @@ trait Contexts { self: Analyzer =>
}
def issueAmbiguousError(err: AbsTypeError) {
+ debugwarn("issue ambiguous error: " + err.errMsg)
if (ambiguousErrors)
unitError(err.errPos, addDiagString(err.errMsg))
else if (bufferErrors) { buffer += err }
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 3536608efd..29831c8469 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package typechecker
import scala.tools.nsc.symtab.Flags
-
import scala.collection.{ mutable, immutable }
/** Duplicate trees and re-type check them, taking care to replace
@@ -18,6 +17,7 @@ import scala.collection.{ mutable, immutable }
*/
abstract class Duplicators extends Analyzer {
import global._
+ import definitions.{ AnyRefClass, AnyValClass }
def retyped(context: Context, tree: Tree): Tree = {
resetClassOwners
@@ -36,7 +36,7 @@ abstract class Duplicators extends Analyzer {
} else resetClassOwners
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
- log("retyped with env: " + env)
+ debuglog("retyped with env: " + env)
(new BodyDuplicator(context)).typed(tree)
}
@@ -82,14 +82,14 @@ abstract class Duplicators extends Analyzer {
val sym1 = context.scope.lookup(sym.name)
// assert(sym1 ne NoSymbol, tpe)
if ((sym1 ne NoSymbol) && (sym1 ne sym)) {
- log("fixing " + sym + " -> " + sym1)
+ debuglog("fixing " + sym + " -> " + sym1)
typeRef(NoPrefix, sym1, mapOverArgs(args, sym1.typeParams))
} else super.mapOver(tpe)
case TypeRef(pre, sym, args) =>
val newsym = updateSym(sym)
if (newsym ne sym) {
- log("fixing " + sym + " -> " + newsym)
+ debuglog("fixing " + sym + " -> " + newsym)
typeRef(mapOver(pre), newsym, mapOverArgs(args, newsym.typeParams))
} else
super.mapOver(tpe)
@@ -97,7 +97,7 @@ abstract class Duplicators extends Analyzer {
case SingleType(pre, sym) =>
val sym1 = updateSym(sym)
if (sym1 ne sym) {
- log("fixing " + sym + " -> " + sym1)
+ debuglog("fixing " + sym + " -> " + sym1)
singleType(mapOver(pre), sym1)
} else
super.mapOver(tpe)
@@ -105,7 +105,7 @@ abstract class Duplicators extends Analyzer {
case ThisType(sym) =>
val sym1 = updateSym(sym)
if (sym1 ne sym) {
- log("fixing " + sym + " -> " + sym1)
+ debuglog("fixing " + sym + " -> " + sym1)
ThisType(sym1)
} else
super.mapOver(tpe)
@@ -136,26 +136,26 @@ abstract class Duplicators extends Analyzer {
private def invalidate(tree: Tree) {
debuglog("attempting to invalidate " + tree.symbol + ", owner - " + (if (tree.symbol ne null) tree.symbol.owner else "<NULL>"))
if (tree.isDef && tree.symbol != NoSymbol) {
- log("invalid " + tree.symbol)
+ debuglog("invalid " + tree.symbol)
invalidSyms(tree.symbol) = tree
tree match {
case ldef @ LabelDef(name, params, rhs) =>
- log("LabelDef " + name + " sym.info: " + ldef.symbol.info)
+ debuglog("LabelDef " + name + " sym.info: " + ldef.symbol.info)
invalidSyms(ldef.symbol) = ldef
// breakIf(true, this, ldef, context)
val newsym = ldef.symbol.cloneSymbol(context.owner)
newsym.setInfo(fixType(ldef.symbol.info))
ldef.symbol = newsym
- log("newsym: " + newsym + " info: " + newsym.info)
+ debuglog("newsym: " + newsym + " info: " + newsym.info)
case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) =>
- log("ValDef " + name + " sym.info: " + vdef.symbol.info)
+ debuglog("ValDef " + name + " sym.info: " + vdef.symbol.info)
invalidSyms(vdef.symbol) = vdef
val newsym = vdef.symbol.cloneSymbol(context.owner)
newsym.setInfo(fixType(vdef.symbol.info))
vdef.symbol = newsym
- log("newsym: " + newsym + " info: " + newsym.info)
+ debuglog("newsym: " + newsym + " info: " + newsym.info)
case DefDef(_, name, tparams, vparamss, _, rhs) =>
// invalidate parameters
@@ -182,7 +182,7 @@ abstract class Duplicators extends Analyzer {
}
ddef.symbol = NoSymbol
enterSym(context, ddef)
- log("remapping this of " + oldClassOwner + " to " + newClassOwner)
+ debuglog("remapping this of " + oldClassOwner + " to " + newClassOwner)
typed(ddef)
}
@@ -228,7 +228,7 @@ abstract class Duplicators extends Analyzer {
ttree
case Block(stats, res) =>
- log("invalidating block")
+ debuglog("invalidating block")
invalidate(stats)
invalidate(res)
tree.tpe = null
@@ -256,7 +256,7 @@ abstract class Duplicators extends Analyzer {
case ldef @ LabelDef(name, params, rhs) =>
// log("label def: " + ldef)
ldef.tpe = null
- val params1 = params map { p => Ident(updateSym(p.symbol)) }
+ val params1 = params map (p => Ident(updateSym(p.symbol)))
super.typed(treeCopy.LabelDef(tree, name, params1, rhs), mode, pt)
case Bind(name, _) =>
@@ -266,13 +266,13 @@ abstract class Duplicators extends Analyzer {
super.typed(tree, mode, pt)
case Ident(_) if tree.symbol.isLabel =>
- log("Ident to labeldef " + tree + " switched to ")
+ debuglog("Ident to labeldef " + tree + " switched to ")
tree.symbol = updateSym(tree.symbol)
tree.tpe = null
super.typed(tree, mode, pt)
case Ident(_) if (origtreesym ne null) && origtreesym.isLazy =>
- log("Ident to a lazy val " + tree + ", " + tree.symbol + " updated to " + origtreesym)
+ debuglog("Ident to a lazy val " + tree + ", " + tree.symbol + " updated to " + origtreesym)
tree.symbol = updateSym(origtreesym)
tree.tpe = null
super.typed(tree, mode, pt)
@@ -308,17 +308,26 @@ abstract class Duplicators extends Analyzer {
super.typed(atPos(tree.pos)(tree1))
*/
case Match(scrut, cases) =>
- val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
+ val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
val scrutTpe = scrut1.tpe.widen
- val cases1 = if (scrutTpe.isFinalType) cases filter {
- case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) =>
- // the typed pattern is not incompatible with the scrutinee type
- scrutTpe.matchesPattern(fixType(tpt.tpe))
- case CaseDef(Typed(_, tpt), EmptyTree, body) =>
- // the typed pattern is not incompatible with the scrutinee type
- scrutTpe.matchesPattern(fixType(tpt.tpe))
- case _ => true
- } else cases
+ val cases1 = {
+ if (scrutTpe.isFinalType) cases filter {
+ case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe matchesPattern fixType(tpt.tpe)
+ case CaseDef(Typed(_, tpt), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe matchesPattern fixType(tpt.tpe)
+ case _ => true
+ }
+ // Without this, AnyRef specializations crash on patterns like
+ // case _: Boolean => ...
+ // Not at all sure this is safe.
+ else if (scrutTpe <:< AnyRefClass.tpe)
+ cases filterNot (_.pat.tpe <:< AnyValClass.tpe)
+ else
+ cases
+ }
super.typed(atPos(tree.pos)(Match(scrut, cases1)), mode, pt)
@@ -327,7 +336,7 @@ abstract class Duplicators extends Analyzer {
tree
case _ =>
- log("default: " + tree)
+ debuglog("Duplicators default case: " + tree.summaryString)
if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 036e7fc750..0ddacf7d36 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -215,7 +215,7 @@ trait Implicits {
object HasMethodMatching {
val dummyMethod = NoSymbol.newTermSymbol(newTermName("typer$dummy"))
def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
-
+
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
val mtpe = MethodType(dummyMethod.newSyntheticValueParams(argtpes map templateArgType), restpe)
memberWildcardType(name, mtpe)
@@ -571,7 +571,7 @@ trait Implicits {
else {
val tvars = undetParams map freshVar
def ptInstantiated = pt.instantiateTypeParams(undetParams, tvars)
-
+
printInference("[search] considering %s (pt contains %s) trying %s against pt=%s".format(
if (undetParams.isEmpty) "no tparams" else undetParams.map(_.name).mkString(", "),
typeVarsInType(ptInstantiated) filterNot (_.isGround) match { case Nil => "no tvars" ; case tvs => tvs.mkString(", ") },
@@ -594,7 +594,7 @@ trait Implicits {
// we must be conservative in leaving type params in undetparams
// prototype == WildcardType: want to remove all inferred Nothings
val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, tvars, targs)
-
+
val subst: TreeTypeSubstituter =
if (okParams.isEmpty) EmptyTreeTypeSubstituter
else {
@@ -621,7 +621,7 @@ trait Implicits {
case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
case t => t
}
-
+
if (context.hasErrors)
fail("typing TypeApply reported errors for the implicit tree")
else {
@@ -780,13 +780,13 @@ trait Implicits {
val newPending = undoLog undo {
is filterNot (alt => alt == i || {
try improves(i, alt)
- catch {
- case e: CyclicReference =>
+ catch {
+ case e: CyclicReference =>
if (printInfers) {
println(i+" discarded because cyclic reference occurred")
e.printStackTrace()
}
- true
+ true
}
})
}
@@ -1163,7 +1163,7 @@ trait Implicits {
/* !!! the following is almost right, but we have to splice nested manifest
* !!! types into this type. This requires a substantial extension of
* !!! reifiers.
- val reifier = new liftcode.Reifier()
+ val reifier = new Reifier()
val rtree = reifier.reifyTopLevel(tp1)
manifestFactoryCall("apply", tp, rtree)
*/
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index eac657da19..c0c801910c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -210,9 +210,9 @@ trait Infer {
def getContext = context
def issue(err: AbsTypeError): Unit = context.issue(err)
-
- def isPossiblyMissingArgs(found: Type, req: Type) = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
-
+
+ def isPossiblyMissingArgs(found: Type, req: Type) = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
+
def explainTypes(tp1: Type, tp2: Type) =
withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
@@ -228,9 +228,9 @@ trait Infer {
if (sym.isError) {
tree setSymbol sym setType ErrorType
} else {
- val topClass = context.owner.toplevelClass
+ val topClass = context.owner.enclosingTopLevelClass
if (context.unit.exists)
- context.unit.depends += sym.toplevelClass
+ context.unit.depends += sym.enclosingTopLevelClass
var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
// Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG
@@ -465,7 +465,7 @@ trait Infer {
*/
def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]]
-
+
foreach3(tparams, tvars, targs) { (tparam, tvar, targ) =>
val retract = (
targ.typeSymbol == NothingClass // only retract Nothings
@@ -640,13 +640,7 @@ trait Infer {
case ExistentialType(tparams, qtpe) =>
isApplicable(undetparams, qtpe, argtpes0, pt)
case MethodType(params, _) =>
- val formals0 = params map { param =>
- param.tpe match {
- case TypeRef(_, sym, List(tpe)) if sym isNonBottomSubClass CodeClass => tpe
- case tpe => tpe
- }
- }
- val formals = formalTypes(formals0, argtpes0.length)
+ val formals = formalTypes(params map { _.tpe }, argtpes0.length)
def tryTupleApply: Boolean = {
// if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
@@ -1057,49 +1051,74 @@ trait Infer {
* @param pt the expected result type of the instance
*/
def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) {
- val pt = widen(pt0)
- //println("infer constr inst "+tree+"/"+undetparams+"/"+pt0)
- var restpe = tree.tpe.finalResultType
- var tvars = undetparams map freshVar
+ val pt = widen(pt0)
+ val ptparams = freeTypeParamsOfTerms.collect(pt)
+ val ctorTp = tree.tpe
+ val resTp = ctorTp.finalResultType
- /** Compute type arguments for undetermined params and substitute them in given tree.
+ debuglog("infer constr inst "+ tree +"/"+ undetparams +"/ pt= "+ pt +" pt0= "+ pt0 +" resTp: "+ resTp)
+
+ /** Compute type arguments for undetermined params
*/
- def computeArgs =
- try {
- val targs = solvedTypes(tvars, undetparams, undetparams map varianceInType(restpe),
- true, lubDepth(List(restpe, pt)))
-// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
-// no checkBounds here. If we enable it, test bug602 fails.
- new TreeTypeSubstituter(undetparams, targs).traverse(tree)
- } catch ifNoInstance{ msg =>
- NoConstructorInstanceError(tree, restpe, pt, msg)
+ def inferFor(pt: Type): Option[List[Type]] = {
+ val tvars = undetparams map freshVar
+ val resTpV = resTp.instantiateTypeParams(undetparams, tvars)
+
+ if (resTpV <:< pt) {
+ try {
+ // debuglog("TVARS "+ (tvars map (_.constr)))
+ // look at the argument types of the primary constructor corresponding to the pattern
+ val variances = undetparams map varianceInType(ctorTp.paramTypes.headOption getOrElse ctorTp)
+ val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt)))
+ // checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
+ // no checkBounds here. If we enable it, test bug602 fails.
+ // TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams
+ Some(targs)
+ } catch ifNoInstance { msg =>
+ debuglog("NO INST "+ (tvars, tvars map (_.constr)))
+ NoConstructorInstanceError(tree, resTp, pt, msg)
+ None
+ }
+ } else {
+ debuglog("not a subtype: "+ resTpV +" </:< "+ pt)
+ None
}
- def instError = {
- if (settings.debug.value) Console.println("ici " + tree + " " + undetparams + " " + pt)
- if (settings.explaintypes.value) explainTypes(restpe.instantiateTypeParams(undetparams, tvars), pt)
- ConstrInstantiationError(tree, restpe, pt)
}
- if (restpe.instantiateTypeParams(undetparams, tvars) <:< pt) {
- computeArgs
- } else if (isFullyDefined(pt)) {
- debuglog("infer constr " + tree + ":" + restpe + ", pt = " + pt)
- var ptparams = freeTypeParamsOfTerms.collect(pt)
- debuglog("free type params = " + ptparams)
- val ptWithWildcards = pt.instantiateTypeParams(ptparams, ptparams map (ptparam => WildcardType))
- tvars = undetparams map freshVar
- if (restpe.instantiateTypeParams(undetparams, tvars) <:< ptWithWildcards) {
- computeArgs
- restpe = skipImplicit(tree.tpe.resultType)
- debuglog("new tree = " + tree + ":" + restpe)
- val ptvars = ptparams map freshVar
- val pt1 = pt.instantiateTypeParams(ptparams, ptvars)
- if (isPopulated(restpe, pt1)) {
- ptvars foreach instantiateTypeVar
- } else { if (settings.debug.value) Console.println("no instance: "); instError }
- } else { if (settings.debug.value) Console.println("not a subtype " + restpe.instantiateTypeParams(undetparams, tvars) + " of " + ptWithWildcards); instError }
- } else { if (settings.debug.value) Console.println("not fully defined: " + pt); instError }
+
+ def inferForApproxPt =
+ if (isFullyDefined(pt)) {
+ inferFor(pt.instantiateTypeParams(ptparams, ptparams map (x => WildcardType))) flatMap { targs =>
+ val ctorTpInst = tree.tpe.instantiateTypeParams(undetparams, targs)
+ val resTpInst = skipImplicit(ctorTpInst.finalResultType)
+ val ptvars =
+ ptparams map {
+ // since instantiateTypeVar wants to modify the skolem that corresponds to the method's type parameter,
+ // and it uses the TypeVar's origin to locate it, deskolemize the existential skolem to the method tparam skolem
+ // (the existential skolem was created by adaptConstrPattern to introduce the type slack necessary to soundly deal with variant type parameters)
+ case skolem if skolem.isExistentialSkolem => freshVar(skolem.deSkolemize.asInstanceOf[TypeSymbol])
+ case p => freshVar(p)
+ }
+
+ val ptV = pt.instantiateTypeParams(ptparams, ptvars)
+
+ if (isPopulated(resTpInst, ptV)) {
+ ptvars foreach instantiateTypeVar
+ debuglog("isPopulated "+ resTpInst +", "+ ptV +" vars= "+ ptvars)
+ Some(targs)
+ } else None
+ }
+ } else None
+
+ (inferFor(pt) orElse inferForApproxPt) map { targs =>
+ new TreeTypeSubstituter(undetparams, targs).traverse(tree)
+ } getOrElse {
+ debugwarn("failed inferConstructorInstance for "+ tree +" : "+ tree.tpe +" under "+ undetparams +" pt = "+ pt +(if(isFullyDefined(pt)) " (fully defined)" else " (not fully defined)"))
+ // if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), pt)
+ ConstrInstantiationError(tree, resTp, pt)
+ }
}
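(For orientation, a sketch of the source-level situation inferConstructorInstance handles: inferring a case class's type arguments at a constructor pattern. Cell and the values are illustrative only:)

case class Cell[+T](elem: T)

object ConstrInferDemo extends App {
  val c: Cell[Int] = Cell(42)
  c match {
    // the pattern Cell(x) starts with an undetermined T; inferFor solves
    // T against the expected type Cell[Int], so x is typed as Int
    case Cell(x) => println(x + 1) // 43
  }
}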
+
def instBounds(tvar: TypeVar): (Type, Type) = {
val tparam = tvar.origin.typeSymbol
val instType = toOrigin(tvar.constr.inst)
@@ -1391,10 +1410,11 @@ trait Infer {
case _ =>
}
}
+ // todo: missing test case
NoBestExprAlternativeError(tree, pt)
} else if (!competing.isEmpty) {
- if (secondTry) NoBestExprAlternativeError(tree, pt)
- else { if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt) }
+ if (secondTry) { NoBestExprAlternativeError(tree, pt); setError(tree) }
+ else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt)
} else {
// val applicable = alts1 filter (alt =>
// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
@@ -1404,10 +1424,14 @@ trait Infer {
}
}
- @inline private def inSilentMode(expr: Typer => Boolean): Boolean = {
- val silentContext = context.makeSilent(context.ambiguousErrors)
- val res = expr(newTyper(silentContext))
- if (silentContext.hasErrors) false else res
+ @inline private def inSilentMode(context: Context)(expr: => Boolean): Boolean = {
+ val oldState = context.state
+ context.setBufferErrors()
+ val res = expr
+ val contextWithErrors = context.hasErrors
+ context.flushBuffer()
+ context.restoreState(oldState)
+ res && !contextWithErrors
}
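(A standalone sketch of the buffer/check/restore pattern the new inSilentMode uses; the mutable buffer below is illustrative, not the compiler's Context API. The key point is the final result: the predicate must hold and no errors may have been buffered:)

object SilentModeDemo extends App {
  private var buffered = List.empty[String]   // stand-in for the context's error buffer
  def report(msg: String) { buffered ::= msg }

  def inSilentMode(expr: => Boolean): Boolean = {
    val saved = buffered                      // stand-in for context.state
    val res = expr
    val hadErrors = buffered.nonEmpty
    buffered = saved                          // stand-in for flushBuffer() + restoreState(saved)
    res && !hadErrors
  }

  println(inSilentMode { report("type mismatch"); true }) // false: an error was buffered
  println(inSilentMode { true })                          // true
}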
// Checks against the name of the parameter and also any @deprecatedName.
@@ -1478,7 +1502,7 @@ trait Infer {
val applicable = resolveOverloadedMethod(argtpes, {
alts filter { alt =>
- inSilentMode(typer0 => typer0.infer.isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
+ inSilentMode(context)(isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
(!varArgsOnly || isVarArgsList(alt.tpe.params))
}
})
@@ -1494,14 +1518,12 @@ trait Infer {
if (improves(alt, best)) alt else best)
val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
if (best == NoSymbol) {
- if (pt == WildcardType)
- NoBestMethodAlternativeError(tree, argtpes, pt)
- else
- inferMethodAlternative(tree, undetparams, argtpes, WildcardType)
+ if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt)
+ else inferMethodAlternative(tree, undetparams, argtpes, WildcardType)
} else if (!competing.isEmpty) {
if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous)
AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt)
- setError(tree)
+ else setError(tree)
()
} else {
// checkNotShadowed(tree.pos, pre, best, applicable)
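(The competing-alternatives branch above corresponds to source like the following, where neither overload improves on the other; a sketch:)

object AmbiguityDemo {
  def f(x: AnyRef, y: String) = 1
  def f(x: String, y: AnyRef) = 2

  // f("a", "b")  // does not compile: "ambiguous reference to overloaded
  //              // definition" -- the AmbiguousMethodAlternativeError case above
}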
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index c63ae90ef6..e43b1fab0b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -20,50 +20,50 @@ trait Macros { self: Analyzer =>
macroArgs(fn) :+ args
case TypeApply(fn, args) =>
macroArgs(fn) :+ args
- case Select(qual, name) if !isStaticMacro(tree.symbol) =>
+ case Select(qual, name) =>
List(List(qual))
case _ =>
List(List())
}
- private def isStaticMacro(mac: Symbol): Boolean =
- mac.owner.isModuleClass
-
/**
- * The definition of the method implementing a macro. Example:
+ * The definition of the method implementing a macro. Example:
* Say we have in a class C
*
* def macro foo[T](xs: List[T]): T = expr
*
* Then the following macro method is generated for `foo`:
*
- * def defmacro$foo(glob: scala.reflect.api.Universe)
- * (_this: glob.Tree)
- * (T: glob.Type)
- * (xs: glob.Tree): glob.Tree = {
- * implicit val $glob = glob
+ * def defmacro$foo
+ * (_context: scala.reflect.macro.Context)
+ * (_this: _context.Tree)
+ * (T: _context.TypeTree)
+ * (xs: _context.Tree): _context.Tree = {
+ * import _context._ // this means that all methods of Context can be used unqualified in the macro's body
* expr
* }
*
- * If `foo` is declared in an object, the second parameter list is () instead of (_this: glob.Tree).
+ * If the macro has no type arguments, the third parameter list is omitted (it is not left empty, but omitted altogether).
+ *
+ * To find out the desugared representation of your particular macro, compile it with -Ymacro-debug.
*/
def macroMethDef(mdef: DefDef): Tree = {
def paramDef(name: Name, tpt: Tree) = ValDef(Modifiers(PARAM), name, tpt, EmptyTree)
val contextType = TypeTree(ReflectMacroContext.tpe)
- val globParamSec = List(paramDef(nme.context, contextType))
- def globSelect(name: Name) = Select(Ident(nme.context), name)
- def globTree = globSelect(newTypeName("Tree"))
- def globType = globSelect(newTypeName("Type"))
- val thisParamSec = if (isStaticMacro(mdef.symbol)) List() else List(paramDef(newTermName("_this"), globTree))
- def tparamInMacro(tdef: TypeDef) = paramDef(tdef.name.toTermName, globType)
+ val globParamSec = List(paramDef(nme.macroContext, contextType))
+ def globSelect(name: Name) = Select(Ident(nme.macroContext), name)
+ def globTree = globSelect(tpnme.Tree)
+ def globTypeTree = globSelect(tpnme.TypeTree)
+ val thisParamSec = List(paramDef(newTermName(nme.macroThis), globTree))
+ def tparamInMacro(tdef: TypeDef) = paramDef(tdef.name.toTermName, globTypeTree)
def vparamInMacro(vdef: ValDef): ValDef = paramDef(vdef.name, vdef.tpt match {
case tpt @ AppliedTypeTree(hk, _) if treeInfo.isRepeatedParamType(tpt) => AppliedTypeTree(hk, List(globTree))
case _ => globTree
})
def wrapImplicit(tree: Tree) = atPos(tree.pos) {
// implicit hasn't proven useful so far, so I'm disabling it
- //val implicitDecl = ValDef(Modifiers(IMPLICIT), nme.contextImplicit, SingletonTypeTree(Ident(nme.context)), Ident(nme.context))
- val importGlob = Import(Ident(nme.context), List(ImportSelector(nme.WILDCARD, -1, null, -1)))
+ //val implicitDecl = ValDef(Modifiers(IMPLICIT), nme.macroContextImplicit, SingletonTypeTree(Ident(nme.macroContext)), Ident(nme.macroContext))
+ val importGlob = Import(Ident(nme.macroContext), List(ImportSelector(nme.WILDCARD, -1, null, -1)))
Block(List(importGlob), tree)
}
var formals = (mdef.vparamss map (_ map vparamInMacro))
@@ -82,7 +82,7 @@ trait Macros { self: Analyzer =>
def addMacroMethods(templ: Template, namer: Namer): Unit = {
for (ddef @ DefDef(mods, _, _, _, _, _) <- templ.body if mods hasFlag MACRO) {
- val trace = scala.tools.nsc.util.trace when settings.debug.value
+ val trace = scala.tools.nsc.util.trace when settings.Ymacrodebug.value
val sym = namer.enterSyntheticSym(trace("macro def: ")(macroMethDef(ddef)))
trace("added to "+namer.context.owner.enclClass+": ")(sym)
}
@@ -90,33 +90,102 @@ trait Macros { self: Analyzer =>
lazy val mirror = new scala.reflect.runtime.Mirror {
lazy val libraryClassLoader = {
+ // todo. this is more or less okay, but not completely correct
+ // see https://issues.scala-lang.org/browse/SI-5433 for more info
val classpath = global.classPath.asURLs
- ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+ var loader: ClassLoader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+
+ // a heuristic to detect the REPL
+ if (global.settings.exposeEmptyPackage.value) {
+ import scala.tools.nsc.interpreter._
+ val virtualDirectory = global.settings.outputDirs.getSingleOutput.get
+ loader = new AbstractFileClassLoader(virtualDirectory, loader) {}
+ }
+
+ loader
}
override def defaultReflectiveClassLoader() = libraryClassLoader
}
/** Return optionally address of companion object and implementation method symbol
- * of given macro; or None if implementation classfile cannot be loaded or does
+ * of given macro; or None if implementation classfile cannot be loaded or does
* not contain the macro implementation.
*/
def macroImpl(mac: Symbol): Option[(AnyRef, mirror.Symbol)] = {
+ val debug = settings.Ymacrodebug.value
+ val trace = scala.tools.nsc.util.trace when debug
+ trace("looking for macro implementation: ")(mac.fullNameString)
+
try {
val mmeth = macroMeth(mac)
+ trace("found implementation at: ")(mmeth.fullNameString)
+
if (mmeth == NoSymbol) None
else {
- val receiverClass: mirror.Symbol = mirror.classWithName(mmeth.owner.fullName)
+ trace("loading implementation class: ")(mmeth.owner.fullName)
+ trace("classloader is: ")("%s of type %s".format(mirror.libraryClassLoader, mirror.libraryClassLoader.getClass))
+ def inferClasspath(cl: ClassLoader) = cl match {
+ case cl: java.net.URLClassLoader => "[" + (cl.getURLs mkString ",") + "]"
+ case _ => "<unknown>"
+ }
+ trace("classpath is: ")(inferClasspath(mirror.libraryClassLoader))
+
+ // @xeno.by: relies on the fact that macros can only be defined in static classes
+ def classfile(sym: Symbol): String = {
+ def recur(sym: Symbol): String = sym match {
+ case sym if sym.owner.isPackageClass =>
+ val suffix = if (sym.isModuleClass) "$" else ""
+ sym.fullName + suffix
+ case sym =>
+ val separator = if (sym.owner.isModuleClass) "" else "$"
+ recur(sym.owner) + separator + sym.javaSimpleName.toString
+ }
+
+ if (sym.isClass || sym.isModule) recur(sym)
+ else recur(sym.enclClass)
+ }
+
+ // @xeno.by: this doesn't work for inner classes
+ // neither does mmeth.owner.javaClassName, so I had to roll my own implementation
+ //val receiverName = mmeth.owner.fullName
+ val receiverName = classfile(mmeth.owner)
+ val receiverClass: mirror.Symbol = mirror.symbolForName(receiverName)
+
+ if (debug) {
+ println("receiverClass is: " + receiverClass.fullNameString)
+
+ val jreceiverClass = mirror.classToJava(receiverClass)
+ val jreceiverSource = jreceiverClass.getProtectionDomain.getCodeSource
+ println("jreceiverClass is %s from %s".format(jreceiverClass, jreceiverSource))
+ println("jreceiverClassLoader is %s with classpath %s".format(jreceiverClass.getClassLoader, inferClasspath(jreceiverClass.getClassLoader)))
+ }
+
val receiverObj = receiverClass.companionModule
- if (receiverObj == NoSymbol) None
+ trace("receiverObj is: ")(receiverObj.fullNameString)
+
+ if (receiverObj == mirror.NoSymbol) None
else {
- val receiver = mirror.getCompanionObject(receiverClass)
+ // @xeno.by: yet another reflection method that doesn't work for inner classes
+ //val receiver = mirror.companionInstance(receiverClass)
+ val clazz = java.lang.Class.forName(receiverName, true, mirror.libraryClassLoader)
+ val receiver = clazz getField "MODULE$" get null
+
val rmeth = receiverObj.info.member(mirror.newTermName(mmeth.name.toString))
- Some((receiver, rmeth))
+ if (debug) {
+ println("rmeth is: " + rmeth.fullNameString)
+ println("jrmeth is: " + mirror.methodToJava(rmeth))
+ }
+
+ if (rmeth == mirror.NoSymbol) None
+ else {
+ Some((receiver, rmeth))
+ }
}
}
} catch {
case ex: ClassNotFoundException =>
+ trace("implementation class failed to load: ")(ex.toString)
None
}
}
@@ -125,32 +194,62 @@ trait Macros { self: Analyzer =>
* Or, if that fails, and the macro overrides a method return
* tree that calls this method instead of the macro.
*/
- def macroExpand(tree: Tree, context: Context): Option[Any] = {
+ def macroExpand(tree: Tree, typer: Typer): Option[Any] = {
+ val trace = scala.tools.nsc.util.trace when settings.Ymacrodebug.value
+ trace("macroExpand: ")(tree)
+
val macroDef = tree.symbol
macroImpl(macroDef) match {
case Some((receiver, rmeth)) =>
val argss = List(global) :: macroArgs(tree)
val paramss = macroMeth(macroDef).paramss
+ trace("paramss: ")(paramss)
val rawArgss = for ((as, ps) <- argss zip paramss) yield {
if (isVarArgsList(ps)) as.take(ps.length - 1) :+ as.drop(ps.length - 1)
else as
}
val rawArgs: Seq[Any] = rawArgss.flatten
+ trace("rawArgs: ")(rawArgs)
+ val savedInfolevel = nodePrinters.infolevel
try {
- Some(mirror.invoke(receiver, rmeth, rawArgs: _*))
+ // @xeno.by: InfoLevel.Verbose examines and prints out infos of symbols.
+ // By means of `this` references those symbols can climb up the lexical scope,
+ // and when a node printer examines them it enumerates and analyzes their
+ // children (asking for infos and tpes). If one of those children involves
+ // a macro expansion, things might get nasty;
+ // that's why I'm temporarily turning this behavior off.
+ nodePrinters.infolevel = nodePrinters.InfoLevel.Quiet
+ val expanded = mirror.invoke(receiver, rmeth)(rawArgs: _*)
+ expanded match {
+ case expanded: Tree =>
+ val expectedTpe = tree.tpe
+ val typed = typer.typed(expanded, EXPRmode, expectedTpe)
+ Some(typed)
+ case expanded if expanded.isInstanceOf[Tree] =>
+ typer.context.unit.error(tree.pos, "macro must return a compiler-specific tree; returned value is Tree, but it doesn't belong to this compiler's universe")
+ None
+ case expanded =>
+ typer.context.unit.error(tree.pos, "macro must return a compiler-specific tree; returned value is of class: " + expanded.getClass)
+ None
+ }
} catch {
case ex =>
val realex = ReflectionUtils.unwrapThrowable(ex)
- val stacktrace = new java.io.StringWriter()
- realex.printStackTrace(new java.io.PrintWriter(stacktrace))
- val msg = System.getProperty("line.separator") + stacktrace
- context.unit.error(tree.pos, "exception during macro expansion: " + msg)
+ val msg = if (settings.Ymacrodebug.value) {
+ val stacktrace = new java.io.StringWriter()
+ realex.printStackTrace(new java.io.PrintWriter(stacktrace))
+ System.getProperty("line.separator") + stacktrace
+ } else {
+ realex.getMessage
+ }
+ typer.context.unit.error(tree.pos, "exception during macro expansion: " + msg)
None
+ } finally {
+ nodePrinters.infolevel = savedInfolevel
}
case None =>
- val trace = scala.tools.nsc.util.trace when settings.debug.value
def notFound() = {
- context.unit.error(tree.pos, "macro implementation not found: " + macroDef.name)
+ typer.context.unit.error(tree.pos, "macro implementation not found: " + macroDef.name)
None
}
def fallBackToOverridden(tree: Tree): Option[Tree] = {
@@ -159,7 +258,7 @@ trait Macros { self: Analyzer =>
macroDef.allOverriddenSymbols match {
case first :: _ =>
Some(Select(qual, name) setPos tree.pos setSymbol first)
- case _ =>
+ case _ =>
trace("macro is not overridden: ")(tree)
notFound()
}
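(The `clazz getField "MODULE$" get null` dance above is the usual way to reach a Scala object instance through plain Java reflection. A standalone sketch, assuming Greeter is compiled to the default package:)

object Greeter { def greet(name: String) = "hello, " + name }

object ModuleFieldDemo extends App {
  // a top-level object Foo compiles to a class Foo$ with a static MODULE$ field
  val clazz    = java.lang.Class.forName("Greeter$")
  val receiver = clazz.getField("MODULE$").get(null)
  val meth     = clazz.getMethod("greet", classOf[String])
  println(meth.invoke(receiver, "macro")) // hello, macro
}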
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 0c32ff32c0..088a56cd7b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -18,7 +18,7 @@ trait MethodSynthesis {
import global._
import definitions._
import CODE._
-
+
object synthesisUtil {
type M[T] = Manifest[T]
type CM[T] = ClassManifest[T]
@@ -39,7 +39,7 @@ trait MethodSynthesis {
typeRef(container.typeConstructor.prefix, container, args map (_.tpe))
}
-
+
def companionType[T](implicit m: M[T]) =
getRequiredModule(m.erasure.getName).tpe
@@ -71,7 +71,7 @@ trait MethodSynthesis {
class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
private def isOverride(name: TermName) =
clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz))
-
+
def newMethodFlags(name: TermName) = {
val overrideFlag = if (isOverride(name)) OVERRIDE else 0L
overrideFlag | SYNTHETIC
@@ -82,7 +82,7 @@ trait MethodSynthesis {
}
private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
- logResult("finishMethod")(localTyper typed ValOrDefDef(method, f(method)))
+ localTyper typed ValOrDefDef(method, f(method))
private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
@@ -200,7 +200,7 @@ trait MethodSynthesis {
map (acc => atPos(vd.pos.focus)(acc derive annotations))
filterNot (_ eq EmptyTree)
)
- log(trees.mkString("Accessor trees:\n ", "\n ", "\n"))
+ // log(trees.mkString("Accessor trees:\n ", "\n ", "\n"))
if (vd.symbol.isLazy) List(stat)
else trees
case _ =>
@@ -282,7 +282,7 @@ trait MethodSynthesis {
}
}
private def logDerived(result: Tree): Tree = {
- log("[+derived] " + ojoin(mods.defaultFlagString, basisSym.accurateKindString, basisSym.getterName.decode)
+ debuglog("[+derived] " + ojoin(mods.defaultFlagString, basisSym.accurateKindString, basisSym.getterName.decode)
+ " (" + derivedSym + ")\n " + result)
result
@@ -326,22 +326,35 @@ trait MethodSynthesis {
super.validate()
}
- // keep type tree of original abstract field
- private def fixTypeTree(dd: DefDef): DefDef = {
- dd.tpt match {
- case tt: TypeTree if dd.rhs == EmptyTree =>
- tt setOriginal tree.tpt
- case tpt =>
- tpt setPos tree.tpt.pos.focus
- }
- dd
- }
override def derivedTree: DefDef = {
- fixTypeTree {
- DefDef(derivedSym,
- if (mods.isDeferred) EmptyTree
- else gen.mkCheckInit(fieldSelection)
- )
+ // For existentials, don't specify a type for the getter, even one derived
+ // from the symbol! This leads to incompatible existentials for the field and
+ // the getter. Let the typer do all the work. You might think "why only for
+ // existentials, why not always," and you would be right, except: a single test
+ // fails, but it looked like some work to deal with it. Test neg/t0606.scala
+ // starts compiling (instead of failing like it's supposed to) because the typer
+ // expects to be able to identify escaping locals in typedDefDef, and fails to
+ // spot that brand of them. In other words it's an artifact of the implementation.
+ val tpt = derivedSym.tpe.finalResultType match {
+ case ExistentialType(_, _) => TypeTree()
+ case tp => TypeTree(tp)
+ }
+ tpt setPos focusPos(derivedSym.pos)
+ // keep type tree of original abstract field
+ if (mods.isDeferred)
+ tpt setOriginal tree.tpt
+
+ // TODO - reconcile this with the DefDef creator in Trees (which
+ // at this writing presented no way to pass a tree in for tpt.)
+ atPos(derivedSym.pos) {
+ DefDef(
+ Modifiers(derivedSym.flags),
+ derivedSym.name.toTermName,
+ Nil,
+ Nil,
+ tpt,
+ if (mods.isDeferred) EmptyTree else gen.mkCheckInit(fieldSelection)
+ ) setSymbol derivedSym
}
}
}
@@ -363,7 +376,7 @@ trait MethodSynthesis {
override def keepClean = !mods.isParamAccessor
override def derivedTree = (
if (mods.isDeferred) EmptyTree
- else treeCopy.ValDef(tree, mods | flagsExtra, name, tree.tpt, tree.rhs)
+ else copyValDef(tree)(mods = mods | flagsExtra, name = this.name)
)
}
case class Param(tree: ValDef) extends DerivedFromValDef {
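(Roughly what the accessor derivation above produces for a concrete val -- a hand-written approximation with illustrative names; the real output marks the getter SYNTHETIC and, per the derivedTree comment, leaves tpt blank for existential result types:)

class FieldAndGetter {
  // for `val x = 1` the synthesizer derives, in effect:
  private[this] val x0: Int = 1 // the underlying field
  def x: Int = x0               // the derived getter
}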
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 35ee46363c..1566897dab 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -277,12 +277,16 @@ trait Namers extends MethodSynthesis {
def assignAndEnterFinishedSymbol(tree: MemberDef): Symbol = {
val sym = assignAndEnterSymbol(tree)
sym setInfo completerOf(tree)
- log("[+info] " + sym.fullLocationString)
+ // log("[+info] " + sym.fullLocationString)
sym
}
private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = {
- log("[+symbol] " + sym.hasFlagsToString(-1L) + " " + sym)
+ sym.name.toTermName match {
+ case nme.IMPORT | nme.OUTER | nme.ANON_CLASS_NAME | nme.ANON_FUN_NAME | nme.CONSTRUCTOR => ()
+ case _ =>
+ log("[+symbol] " + sym.debugLocationString)
+ }
tree.symbol = sym
sym
}
@@ -1315,7 +1319,7 @@ trait Namers extends MethodSynthesis {
catch typeErrorHandler(tree, ErrorType)
result match {
- case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => typer.deskolemizeTypeParams(tparams)(result)
+ case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => deskolemizeTypeParams(tparams)(result)
case _ => result
}
}
@@ -1478,8 +1482,11 @@ trait Namers extends MethodSynthesis {
private val ownerSym = owner.symbol
override val typeParams = tparams map (_.symbol) //@M
override val tree = restp.tree
- if (ownerSym.isTerm)
- typer skolemizeTypeParams tparams
+
+ if (ownerSym.isTerm) {
+ val skolems = deriveFreshSkolems(tparams map (_.symbol))
+ map2(tparams, skolems)(_ setSymbol _)
+ }
def completeImpl(sym: Symbol) = {
// @M an abstract type's type parameters are entered.
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 359e72e3e4..c621497618 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -37,21 +37,17 @@ trait NamesDefaults { self: Analyzer =>
}
def isNamed(arg: Tree) = nameOf(arg).isDefined
- /** @param pos maps indicies from old to new */
+ /** @param pos maps indices from old to new */
def reorderArgs[T: ClassManifest](args: List[T], pos: Int => Int): List[T] = {
val res = new Array[T](args.length)
- // (hopefully) faster than zipWithIndex
- (0 /: args) { case (index, arg) => res(pos(index)) = arg; index + 1 }
+ foreachWithIndex(args)((arg, index) => res(pos(index)) = arg)
res.toList
}
- /** @param pos maps indicies from new to old (!) */
+ /** @param pos maps indices from new to old (!) */
def reorderArgsInv[T: ClassManifest](args: List[T], pos: Int => Int): List[T] = {
val argsArray = args.toArray
- val res = new mutable.ListBuffer[T]
- for (i <- 0 until argsArray.length)
- res += argsArray(pos(i))
- res.toList
+ argsArray.indices map (i => argsArray(pos(i))) toList
}
/** returns `true` if every element is equal to its index */
@@ -432,11 +428,11 @@ trait NamesDefaults { self: Analyzer =>
}
} else NoSymbol
}
-
+
private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = {
val savedParams = context.extractUndetparams()
val savedReporting = context.ambiguousErrors
-
+
context.setAmbiguousErrors(false)
try fn(savedParams)
finally {
@@ -455,7 +451,7 @@ trait NamesDefaults { self: Analyzer =>
|| (ctx.owner.rawInfo.member(name) != NoSymbol)
)
)
-
+
/** A full type check is very expensive; let's make sure there's a name
* somewhere which could potentially be ambiguous before we go that route.
*/
@@ -507,7 +503,7 @@ trait NamesDefaults { self: Analyzer =>
/**
* Removes name assignments from args. Additionally, returns an array mapping
- * argument indicies from call-site-order to definition-site-order.
+ * argument indices from call-site-order to definition-site-order.
*
* Verifies that names are not specified twice, positional args don't appear
* after named ones.
@@ -523,7 +519,7 @@ trait NamesDefaults { self: Analyzer =>
def matchesName(param: Symbol) = !param.isSynthetic && (
(param.name == name) || (param.deprecatedParamName match {
case Some(`name`) =>
- context0.unit.deprecationWarning(arg.pos,
+ context0.unit.deprecationWarning(arg.pos,
"the parameter name "+ name +" has been deprecated. Use "+ param.name +" instead.")
true
case _ => false
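(A runnable sketch of reorderArgs as rewritten above; foreachWithIndex is a compiler-internal helper, so zipWithIndex stands in for it here. As the doc comment says, pos maps indices from old to new:)

object ReorderDemo extends App {
  def reorderArgs[T: ClassManifest](args: List[T], pos: Int => Int): List[T] = {
    val res = new Array[T](args.length)
    args.zipWithIndex foreach { case (arg, i) => res(pos(i)) = arg }
    res.toList
  }

  // f(c = "c", a = "a", b = "b") against params (a, b, c): 0 -> 2, 1 -> 0, 2 -> 1
  println(reorderArgs(List("c", "a", "b"), Map(0 -> 2, 1 -> 0, 2 -> 1))) // List(a, b, c)
}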
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
index 6d31243fd0..b060fd7121 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
@@ -43,7 +43,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
val outer = newTermName("<outer>")
val runOrElse = newTermName("runOrElse")
val zero = newTermName("zero")
- val __match = newTermName("__match")
+ val _match = newTermName("__match") // don't name this val __match, since that would trigger virtual pattern matching...
def counted(str: String, i: Int) = newTermName(str+i)
}
@@ -51,8 +51,8 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
object MatchTranslator {
def apply(typer: Typer): MatchTranslation = {
import typer._
- // typing `__match` to decide which MatchTranslator to create adds 4% to quick.comp.timer
- newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName.__match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
+ // typing `_match` to decide which MatchTranslator to create adds 4% to quick.comp.timer
+ newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
case SilentResultValue(ms) => new PureMatchTranslator(typer, ms)
case _ => new OptimizingMatchTranslator(typer)
}
@@ -116,6 +116,10 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore =>
import typer.{typed, context, silent, reallyExists}
+ private def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
+ case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
+ case _ => tp
+ }
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -133,11 +137,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
// and the only place that emits Matches after typers is for exception handling anyway)
assert(phase.id <= currentRun.typerPhase.id, phase)
- def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
- case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
- case _ => tp
- }
-
val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen))
val scrutSym = freshSym(scrut.pos, pureType(scrutType))
@@ -146,6 +145,47 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, matchOwner)
}
+ // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
+ // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
+ // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
+ // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
+ // unlike translateMatch, we type our result before returning it
+ def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
+ // if they're already simple enough to be handled by the back-end, we're done
+ if (caseDefs forall treeInfo.isCatchCase) caseDefs
+ else {
+ val okPt = repeatedToSeq(pt)
+ val switch = {
+ val bindersAndCases = caseDefs map { caseDef =>
+ // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
+ // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
+ val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, okPt)(caseDef), EmptySubstitution))
+ }
+
+ (emitTypeSwitch(bindersAndCases, pt) map (_.map(fixerUpper(matchOwner, pos).apply(_).asInstanceOf[CaseDef])))
+ }
+
+ val catches = switch getOrElse {
+ val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, okPt)(caseDef), EmptySubstitution))}
+
+ val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+
+ List(
+ atPos(pos) {
+ CaseDef(
+ Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
+ EmptyTree,
+ combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, scrut => Throw(CODE.REF(exSym)))
+ )
+ })
+ }
+
+ typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+ }
+
+
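(translateTry above keeps catch cases the back-end already understands -- binds and type tests, per treeInfo.isCatchCase -- and routes anything else through a translated match on a bound exception. A sketch of both kinds of case:)

object TryDemo extends App {
  try throw new java.io.IOException("boom")
  catch {
    // simple enough for the back-end as-is: a bind plus a type test
    case e: java.io.IOException => println("io: " + e.getMessage)
    // the guard makes this case non-trivial, forcing the catch-all path
    // that re-runs a normal pattern match on the bound exception
    case e: Exception if e.getMessage == null => println("no message")
  }
}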
/** The translation of `pat if guard => body` has two aspects:
* 1) the substitution due to the variables bound by patterns
@@ -213,13 +253,14 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, pos), extractor.subBindersAndPatterns: _*)
}
- /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
- * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these)
- *
- * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
- */
+
object MaybeBoundTyped {
- // the returned type is the one inferred by inferTypedPattern (`owntype`)
+ /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
+ * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
+ * The returned type is the one inferred by inferTypedPattern (`owntype`)
+ *
+ * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that pattern's result)
+ */
def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
case Bound(subpatBinder, typed@Typed(expr, tpt)) => Some((subpatBinder, typed.tpe))
case Bind(_, typed@Typed(expr, tpt)) => Some((patBinder, typed.tpe))
@@ -668,6 +709,10 @@ class Foo(x: Other) { x._1 } // no error in this order
def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] =
None
+ // for catch
+ def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
+ None
+
abstract class TreeMaker {
/** captures the scope and the value of the bindings in patterns
* important *when* the substitution happens (can't accumulate and do at once after the full matcher has been constructed)
@@ -788,6 +833,7 @@ class Foo(x: Other) { x._1 } // no error in this order
}
// implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
+ // TODO: normalize construction, which yields a combination of a EqualityTestTreeMaker (when necessary) and a TypeTestTreeMaker
case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends CondTreeMaker {
val nextBinderTp = glb(List(patBinder.info.widen, pt))
@@ -843,6 +889,10 @@ class Foo(x: Other) { x._1 } // no error in this order
val cond = typeAndEqualityTest(patBinder, pt)
val res = codegen._asInstanceOf(patBinder, nextBinderTp)
+
+ // TODO: remove this
+ def isStraightTypeTest = cond match { case TypeApply(_, _) => cond.symbol == Any_isInstanceOf case _ => false }
+
override def toString = "TET"+(patBinder, pt)
}
@@ -926,25 +976,30 @@ class Foo(x: Other) { x._1 } // no error in this order
}
// calls propagateSubstitution on the treemakers
- def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = fixerUpper(owner, scrut.pos){
- val casesUnOpt = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = {
+ val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, CODE.MATCHERROR(_))
+ }
- emitSwitch(scrut, scrutSym, casesUnOpt, pt).getOrElse{
+ def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFail: Tree => Tree): Tree = fixerUpper(owner, scrut.pos){
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt).getOrElse{
val (matcher, hasDefault, toHoist) =
- if (casesUnOpt nonEmpty) {
+ if (casesNoSubstOnly nonEmpty) {
// when specified, need to propagate pt explicitly (type inferencer can't handle it)
val optPt =
if (isFullyDefined(pt)) inMatchMonad(pt)
else NoType
- // do this check on casesUnOpt, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ // do this check on casesNoSubstOnly, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
// exhaustivity and reachability must be checked before optimization as well
- val hasDefault = casesUnOpt.nonEmpty && {
- val nonTrivLast = casesUnOpt.last
+ // TODO: improve, a trivial type test before the body still makes for a default case
+ // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
+ val hasDefault = casesNoSubstOnly.nonEmpty && {
+ val nonTrivLast = casesNoSubstOnly.last
nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
}
- val (cases, toHoist) = optimizeCases(scrutSym, casesUnOpt, pt)
+ val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt)
val combinedCases =
cases.map(combineExtractors(_, pt)).reduceLeft(codegen.typedOrElse(optPt))
@@ -952,7 +1007,11 @@ class Foo(x: Other) { x._1 } // no error in this order
(combinedCases, hasDefault, toHoist)
} else (codegen.zero, false, Nil)
- val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, hasDefault)
+ // catch-all
+ val catchAll =
+ if (hasDefault) None // no need for a catch-all when there's already a default
+ else Some(matchFail)
+ val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, catchAll)
if (toHoist isEmpty) expr
else Block(toHoist, expr)
}
@@ -966,7 +1025,7 @@ class Foo(x: Other) { x._1 } // no error in this order
// TODO: do this during tree construction, but that will require tracking the current owner in treemakers
// TODO: assign more fine-grained positions
// fixes symbol nesting, assigns positions
- private def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
+ protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
currentOwner = origOwner
override def traverse(t: Tree) {
@@ -1019,7 +1078,7 @@ class Foo(x: Other) { x._1 } // no error in this order
// codegen relevant to the structure of the translation (how extractors are combined)
trait AbsCodegen {
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree
+ def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]): Tree
def one(res: Tree, bodyPt: Type, matchPt: Type): Tree
def zero: Tree
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
@@ -1098,10 +1157,10 @@ class Foo(x: Other) { x._1 } // no error in this order
protected def matchMonadSym = oneSig.finalResultType.typeSymbol
import CODE._
- def __match(n: Name): SelectStart = matchStrategy DOT n
+ def _match(n: Name): SelectStart = matchStrategy DOT n
private lazy val oneSig: Type =
- typer.typed(__match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
+ typer.typed(_match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
}
trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
@@ -1110,14 +1169,15 @@ class Foo(x: Other) { x._1 } // no error in this order
object pureCodegen extends CommonCodegen { import CODE._
//// methods in MatchingStrategy (the monad companion) -- used directly in translation
// __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree
- = __match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher))
+ // TODO: consider catchAll, or virtualized matching will break in exception handlers
+ def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]): Tree
+ = _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher))
// __match.one(`res`)
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (__match(vpmName.one)) (res)
+ def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (_match(vpmName.one)) (res)
// __match.zero
- def zero: Tree = __match(vpmName.zero)
+ def zero: Tree = _match(vpmName.zero)
// __match.guard(`c`, `then`)
- def guard(c: Tree, then: Tree, tp: Type): Tree = __match(vpmName.guard) APPLY (c, then)
+ def guard(c: Tree, then: Tree, tp: Type): Tree = _match(vpmName.guard) APPLY (c, then)
//// methods in the monad instance -- used directly in translation
// `prev`.flatMap(`b` => `next`)
@@ -1437,94 +1497,145 @@ class Foo(x: Other) { x._1 } // no error in this order
}
}
- //// SWITCHES
+ //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface { self: CodegenCore =>
- object SwitchablePattern { def unapply(pat: Tree) = pat match {
- case Literal(Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) => true // TODO: Java 7 allows strings in switches
- case _ => false
- }}
-
- // def isSwitchable(cases: List[(List[TreeMaker], Tree)]): Boolean = {
- // def isSwitchableTreeMaker(tm: TreeMaker) = tm match {
- // case tm@EqualityTestTreeMaker(_, SwitchablePattern(), _) => true
- // case SubstOnlyTreeMaker(_) => true
- // case AlternativesTreeMaker(_, altss, _) => altss forall (_.forall(isSwitchableTreeMaker))
- // case _ => false
- // }
- // }
+ abstract class SwitchMaker {
+ abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] }
+ val SwitchableTreeMaker: SwitchableTreeMakerExtractor
- private val switchableTpes = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ def alternativesSupported: Boolean
- override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = {
- def sequence[T](xs: List[Option[T]]): Option[List[T]] =
+ def isDefault(x: CaseDef): Boolean
+ def defaultSym: Symbol
+ def defaultBody: Tree
+ def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef
+
+ private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
if (xs exists (_.isEmpty)) None else Some(xs.flatten)
- def isSwitchableTpe(tpe: Type): Boolean =
- switchableTpes contains tpe
- def switchableConstToInt(x: Tree): Tree = {
- val Literal(const) = x
- const.tag match {
- case IntTag => x
- case ByteTag | ShortTag | CharTag => Literal(Constant(const.intValue))
+ // empty list ==> failure
+ def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] = {
+ val caseDefs = cases map { case (scrutSym, makers) =>
+ makers match {
+ // default case
+ case (btm@BodyTreeMaker(body, _)) :: Nil =>
+ Some(defaultCase(scrutSym, btm.substitution(body)))
+ // constant (or typetest for typeSwitch)
+ case SwitchableTreeMaker(pattern) :: (btm@BodyTreeMaker(body, _)) :: Nil =>
+ Some(CaseDef(pattern, EmptyTree, btm.substitution(body)))
+ // alternatives
+ case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil if alternativesSupported =>
+ val casePatterns = altss map {
+ case SwitchableTreeMaker(pattern) :: Nil =>
+ Some(pattern)
+ case _ =>
+ None
+ }
+
+ sequence(casePatterns) map { patterns =>
+ val substedBody = btm.substitution(body)
+ CaseDef(Alternative(patterns), EmptyTree, substedBody)
+ }
+ case _ => //println("can't emit switch for "+ makers)
+ None //failure (can't translate pattern to a switch)
+ }
}
- }
- val caseDefs = cases map { makers =>
- removeSubstOnly(makers) match {
- // default case (don't move this to unfold, as it may only occur on the top level, not as an alternative -- well, except in degenerate matches)
- case (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(CaseDef(Ident(nme.WILDCARD), EmptyTree, btm.substitution(body)))
- // constant
- case (EqualityTestTreeMaker(_, const@SwitchablePattern(), _)) :: (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(CaseDef(switchableConstToInt(const), EmptyTree, btm.substitution(body)))
- // alternatives
- case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil => // assert(currLabel.isEmpty && nextLabel.isEmpty)
- val caseConstants = altss map {
- case EqualityTestTreeMaker(_, const@SwitchablePattern(), _) :: Nil =>
- Some(switchableConstToInt(const))
- case _ =>
- None
+ (for(
+ caseDefs <- sequence(caseDefs)) yield
+ if (caseDefs exists isDefault) caseDefs
+ else {
+ caseDefs :+ defaultCase()
}
+ ) getOrElse Nil
+ }
+ }
- sequence(caseConstants) map { contants =>
- val substedBody = btm.substitution(body)
- CaseDef(Alternative(contants), EmptyTree, substedBody)
- }
- case _ =>
- None //failure (can't translate pattern to a switch)
+ class RegularSwitchMaker(scrutSym: Symbol) extends SwitchMaker {
+ val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ val alternativesSupported = true
+
+ object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat match {
+ case Literal(const@Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) =>
+ Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
+ case _ => None
+ }}
+
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const)
+ case _ => None
}
}
- if (!isSwitchableTpe(scrut.tpe))
- None // TODO: emit a cast of the scrutinee and a switch on the cast scrutinee if patterns allow switch but the type of the scrutinee doesn't
- else {
- sequence(caseDefs) map { caseDefs =>
- import CODE._
- val caseDefsWithDefault = {
- def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
- case _ => false
- }
- val hasDefault = caseDefs exists isDefault
- if (hasDefault) caseDefs else {
- val default = atPos(scrut.pos) { DEFAULT ==> MATCHERROR(REF(scrutSym)) }
- caseDefs :+ default
- }
- }
- val matcher = BLOCK(
- if (scrut.tpe != IntClass.tpe) {
- scrutSym setInfo IntClass.tpe
- VAL(scrutSym) === (scrut DOT nme.toInt)
- } else {
- VAL(scrutSym) === scrut
- },
- Match(REF(scrutSym), caseDefsWithDefault) // match on scrutSym, not scrut to avoid duplicating scrut
- )
- // matcher filter (tree => tree.tpe == null) foreach println
- // treeBrowser browse matcher
- matcher // set type to avoid recursion in typedMatch
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ def defaultSym: Symbol = scrutSym
+ def defaultBody: Tree = { import CODE._; MATCHERROR(REF(scrutSym)) }
+ def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ DEFAULT ==> body
+ }}
+ }
+
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym)
+ // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
+ if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
+ val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
+ if (caseDefsWithDefault isEmpty) None
+ else {
+ // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
+ val scrutToInt: Tree =
+ if(scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ else (REF(scrutSym) DOT (nme.toInt))
+ Some(BLOCK(
+ VAL(scrutSym) === scrut,
+ Match(scrutToInt, caseDefsWithDefault)
+ ))
}
+ } else None
+ }
+
+ // for the catch-cases in a try/catch
+ private object typeSwitchMaker extends SwitchMaker {
+ def switchableTpe(tp: Type) = true
+ val alternativesSupported = false // TODO: needs either back-end support or flattening of alternatives during typers
+
+ // TODO: there are more treemaker-sequences that can be handled by type tests
+ // analyze the result of approximateTreeMaker rather than the TreeMaker itself
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case tm@TypeTestTreeMaker(_, _, _) =>
+ Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(tm.nextBinderTp)) /* not used by back-end */)) // -- TODO: use this if binder does not occur in the body
+ case tm@TypeAndEqualityTestTreeMaker(_, patBinder, pt, _) if tm.isStraightTypeTest =>
+ Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(tm.nextBinderTp)) /* not used by back-end */))
+ case _ =>
+ None
+ }
+ }
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
}
+
+ lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+ def defaultBody: Tree = Throw(CODE.REF(defaultSym))
+ def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) ==> body
+ }}
+ }
+
+ // TODO: drop null checks
+ override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = {
+ val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt)
+ if (caseDefsWithDefault isEmpty) None
+ else Some(caseDefsWithDefault)
}
}
@@ -1551,33 +1662,31 @@ class Foo(x: Other) { x._1 } // no error in this order
/** Inline runOrElse and get rid of Option allocations
*
- * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse (throw new MatchError(x))
+ * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
* the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
* if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
*/
@inline private def dontStore(tp: Type) = (tp.typeSymbol eq UnitClass) || (tp.typeSymbol eq NothingClass)
lazy val keepGoing = freshSym(NoPosition, BooleanClass.tpe, "keepGoing") setFlag MUTABLE
lazy val matchRes = freshSym(NoPosition, AnyClass.tpe, "matchRes") setFlag MUTABLE
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean) = {
+ def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]) = {
matchRes.info = if (resTp ne NoType) resTp.widen else AnyClass.tpe // we don't always know resTp, and it might be AnyVal, in which case we can't assign NULL
if (dontStore(resTp)) matchRes resetFlag MUTABLE // don't assign to Unit-typed var's, in fact, make it a val -- conveniently also works around SI-5245
BLOCK(
VAL(zeroSym) === REF(NoneModule), // TODO: can we just get rid of explicitly emitted zero? don't know how to do that as a local rewrite...
- VAL(scrutSym) === scrut, // reuse the symbol of the function's argument to avoid creating a fresh one and substituting it for scrutSym in `matcher` -- the owner structure is repaired by fixerUpper
+ VAL(scrutSym) === scrut,
VAL(matchRes) === mkZero(matchRes.info), // must cast to deal with GADT typing, hence the private mkZero above
VAL(keepGoing) === TRUE,
matcher,
- if(hasDefault) REF(matchRes)
- else (IF (REF(keepGoing)) THEN MATCHERROR(REF(scrutSym)) ELSE REF(matchRes))
+ catchAll map { catchAllGen => (IF (REF(keepGoing)) THEN catchAllGen(REF(scrutSym)) ELSE REF(matchRes)) } getOrElse REF(matchRes)
)
}
// only used to wrap the RHS of a body
def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = {
BLOCK(
- if (dontStore(matchPt)) res // runOrElse hasn't been called yet, so matchRes.isMutable is irrelevant, also, tp may be a subtype of resTp used in runOrElse...
- else (REF(matchRes) === res), // _asInstanceOf(res, tp.widen, force = true)
- REF(keepGoing) === FALSE,
+ REF(keepGoing) === FALSE, // comes before assignment to matchRes, so the latter is in tail position (can ignore the trailing zero -- will disappear when we flatten blocks, which is TODO)
+ if (dontStore(matchPt)) res else (REF(matchRes) === res), // runOrElse hasn't been called yet, so matchRes.isMutable is irrelevant, also, tp may be a subtype of resTp used in runOrElse...
zero // to have a nice lub for lubs -- otherwise we'll get a boxed unit here -- TODO: get rid of all those dangling else zero's
)
}
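(Roughly what emitSwitch produces for a switchable scrutinee: the scrutinee is bound once, converted to Int when it isn't one, and a default case is appended when none exists. A hand-written approximation:)

object SwitchDemo extends App {
  val c: Char = 'b'

  val x1: Int = c.toInt                 // scrutToInt: Char is not Int, so convert
  x1 match {                            // compiles to a JVM switch
    case 97 => println("matched 'a'")
    case 98 => println("matched 'b'")
    case _  => throw new MatchError(x1) // the appended default case
  }
}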
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 04213cfda7..1e17cb2e3f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -150,7 +150,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
// Override checking ------------------------------------------------------------
-
+
def isJavaVarargsAncestor(clazz: Symbol) = (
clazz.isClass
&& clazz.isJavaDefined
@@ -167,14 +167,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
log("Found java varargs ancestor in " + clazz.fullLocationString + ".")
val self = clazz.thisType
val bridges = new ListBuffer[Tree]
-
+
def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe)
-
+
val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE) setPos clazz.pos
bridge.setInfo(bridgetpe.cloneInfo(bridge))
clazz.info.decls enter bridge
-
+
val params = bridge.paramss.head
val elemtp = params.last.tpe.typeArgs.head
val idents = params map Ident
@@ -183,7 +183,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
localTyper typed DefDef(bridge, body)
}
-
+
// For all concrete non-private members that have a (Scala) repeated parameter:
// compute the corresponding method type `jtpe` with a Java repeated parameter
// if a method with type `jtpe` exists and that method is not a varargs bridge
@@ -203,7 +203,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
}
}
-
+
bridges.toList
}
else Nil
@@ -276,10 +276,13 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
* of class `clazz` are met.
*/
def checkOverride(member: Symbol, other: Symbol) {
+ debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString))
+
def memberTp = self.memberType(member)
def otherTp = self.memberType(other)
def noErrorType = other.tpe != ErrorType && member.tpe != ErrorType
def isRootOrNone(sym: Symbol) = sym == RootClass || sym == NoSymbol
+ def isNeitherInClass = (member.owner != clazz) && (other.owner != clazz)
def objectOverrideErrorMsg = (
"overriding " + other.fullLocationString + " with " + member.fullLocationString + ":\n" +
"an overriding object must conform to the overridden object's class bound" +
@@ -334,7 +337,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def deferredCheck = member.isDeferred || !other.isDeferred
def subOther(s: Symbol) = s isSubClass other.owner
def subMember(s: Symbol) = s isSubClass member.owner
-
+
if (subOther(member.owner) && deferredCheck) {
//Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG
return
@@ -381,7 +384,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
overrideError("cannot override final member");
// synthetic exclusion needed for (at least) default getters.
} else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) {
- overrideError("needs `override' modifier");
+ if (isNeitherInClass && !(other.owner isSubClass member.owner))
+ emitOverrideError(
+ clazz + " inherits conflicting members:\n "
+ + infoStringWithLocation(other) + " and\n " + infoStringWithLocation(member)
+ + "\n(Note: this can be resolved by declaring an override in " + clazz + ".)"
+ )
+ else
+ overrideError("needs `override' modifier")
} else if (other.isAbstractOverride && other.isIncompleteIn(clazz) && !member.isAbstractOverride) {
overrideError("needs `abstract override' modifiers")
} else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
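(The new isNeitherInClass branch above targets source like the following, where a class inherits the same concrete member from two unrelated parents; a sketch:)

trait A { def f = 1 }
trait B { def f = 2 }

// class C extends A with B
//   => error: class C inherits conflicting members: f in A and f in B
//      (resolved, as the added note suggests, by declaring an override in C)
class C extends A with B { override def f = super[A].f }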
@@ -420,12 +430,12 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
overrideTypeError();
- }
+ }
else if (other.isAbstractType) {
//if (!member.typeParams.isEmpty) // (1.7) @MAT
// overrideError("may not be parameterized");
val otherTp = self.memberInfo(other)
-
+
if (!(otherTp.bounds containsType memberTp)) { // (1.7.1)
overrideTypeError(); // todo: do an explaintypes with bounds here
explainTypes(_.bounds containsType _, otherTp, memberTp)
@@ -515,16 +525,17 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
!other.isDeferred && other.isJavaDefined && {
// #3622: erasure operates on uncurried types --
// note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
- def uncurryAndErase(tp: Type) = erasure.erasure(sym, uncurry.transformInfo(sym, tp))
+      // !!! erasure.erasure(sym, uncurry.transformInfo(sym, tp)) gives erroneous or inaccessible type - check whether that's still the case!
+ def uncurryAndErase(tp: Type) = erasure.erasure(sym)(uncurry.transformInfo(sym, tp))
val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
- atPhase(currentRun.erasurePhase.next)(tp1 matches tp2)
+ afterErasure(tp1 matches tp2)
})
def ignoreDeferred(member: Symbol) = (
(member.isAbstractType && !member.isFBounded) || (
member.isJavaDefined &&
- // the test requires atPhase(erasurePhase.next) so shouldn't be
+ // the test requires afterErasure so shouldn't be
// done if the compiler has no erasure phase available
(currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol)
)
@@ -1053,7 +1064,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || (s isSubClass ScalaNumberClass)
- def isSpecial(s: Symbol) = isValueClass(unboxedValueClass(s)) || (s isSubClass ScalaNumberClass) || isMaybeValue(s)
+ def isSpecial(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || (s isSubClass ScalaNumberClass) || isMaybeValue(s)
def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
val nullCount = onSyms(_ filter (_ == NullClass) size)
@@ -1074,7 +1085,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
if (nullCount == 2)
nonSensible("", true) // null == null
else if (nullCount == 1) {
- if (onSyms(_ exists isValueClass)) // null == 5
+ if (onSyms(_ exists isPrimitiveValueClass)) // null == 5
nonSensible("", false)
else if (onTrees( _ exists isNew)) // null == new AnyRef
nonSensibleWarning("a fresh object", false)
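
To make the checks above concrete, a sketch of comparisons this code flags; the comments paraphrase the warnings rather than quoting them.

    object CmpDemo {
      val s = "abc"
      s == 5              // always false: unrelated String vs Int
      null == 5           // always false: null against a primitive value class
      null == null        // always true
      null == new AnyRef  // always false: null against a fresh object
    }
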
@@ -1167,7 +1178,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case vsym => ValDef(vsym)
}
}
- def createStaticModuleAccessor() = atPhase(phase.next) {
+ def createStaticModuleAccessor() = afterRefchecks {
val method = (
sym.owner.newMethod(sym.name.toTermName, sym.pos, (sym.flags | STABLE) & ~MODULE)
setInfoAndEnter NullaryMethodType(sym.moduleClass.tpe)
@@ -1178,7 +1189,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
vdef,
localTyper.typedPos(tree.pos) {
val vsym = vdef.symbol
- atPhase(phase.next) {
+ afterRefchecks {
val rhs = gen.newModule(sym, vsym.tpe)
val body = if (sym.owner.isTrait) rhs else gen.mkAssignAndReturn(vsym, rhs)
DefDef(sym, body.changeOwner(vsym -> sym))
@@ -1214,12 +1225,12 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
else gen.mkAssignAndReturn(vsym, rhs)
)
val lazyDef = atPos(tree.pos)(DefDef(lazySym, body.changeOwner(vsym -> lazySym)))
- log("Made lazy def: " + lazyDef)
+ debuglog("Created lazy accessor: " + lazyDef)
if (hasUnitType) List(typed(lazyDef))
else List(
typed(ValDef(vsym)),
- atPhase(phase.next)(typed(lazyDef))
+ afterRefchecks(typed(lazyDef))
)
}
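
Roughly what this branch produces for a non-unit-typed lazy val; the field and accessor names here are illustrative, not the compiler's actual mangling.

    class Config {
      lazy val settings: Map[String, String] = load()
      private def load() = Map("verbose" -> "true")
    }
    // rewritten, conceptually, into a ValDef plus a lazy accessor DefDef:
    //   private[this] var settings$lzy: Map[String, String] = _
    //   def settings: Map[String, String] = { /* initialize once */ settings$lzy }
    // with the DefDef typed afterRefchecks, as in the code above.
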
@@ -1443,26 +1454,6 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
transform(qual)
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe.typeSymbol == ArrayClass && args.length >= 2) =>
- unit.deprecationWarning(tree.pos,
- "new Array(...) with multiple dimensions has been deprecated; use Array.ofDim(...) instead")
- val manif = {
- var etpe = tpt.tpe
- for (_ <- args) { etpe = etpe.typeArgs.headOption.getOrElse(NoType) }
- if (etpe == NoType) {
- unit.error(tree.pos, "too many dimensions for array creation")
- Literal(Constant(null))
- } else {
- localTyper.getManifestTree(tree, etpe, false)
- }
- }
- val newResult = localTyper.typedPos(tree.pos) {
- new ApplyToImplicitArgs(gen.mkMethodCall(ArrayModule, nme.ofDim, args), List(manif))
- }
- currentApplication = tree
- newResult
-
case Apply(fn, args) =>
checkSensible(tree.pos, fn, args)
currentApplication = tree
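
The deprecation path deleted above steered users toward Array.ofDim; for reference, a minimal before/after sketch.

    object OfDimDemo {
      // was (deprecated, and no longer special-cased by this code):
      //   val m = new Array[Int](3, 4)
      // replacement:
      val m: Array[Array[Int]] = Array.ofDim[Int](3, 4)
    }
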
@@ -1531,17 +1522,17 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
)
case _ => ()
}
-
+
// verify classes extending AnyVal meet the requirements
// (whatever those are to be, but at least: @inline annotation)
private def checkAnyValSubclass(clazz: Symbol) = {
if ((clazz isSubClass AnyValClass) && (clazz ne AnyValClass) && !isPrimitiveValueClass(clazz)) {
- if (!clazz.hasAnnotation(ScalaInlineClass))
- unit.error(clazz.pos, "Only @inline classes are allowed to extend AnyVal")
if (clazz.isTrait)
- unit.error(clazz.pos, "Only @inline classes (not traits) are allowed to extend AnyVal")
+ unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
+ /* [Martin] That one is already taken care of by Typers
if (clazz.tpe <:< AnyRefClass.tpe)
unit.error(clazz.pos, "Classes which extend AnyVal may not have an ancestor which inherits AnyRef")
+ */
}
}
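
A sketch of what the relaxed check admits and still rejects, assuming the value-class rules this commit moves toward.

    // accepted: a class extending AnyVal, no @inline annotation required
    class Meters(val value: Double) extends AnyVal {
      def +(other: Meters) = new Meters(value + other.value)
    }

    // rejected: "Only classes (not traits) are allowed to extend AnyVal"
    // trait Measure extends AnyVal
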
@@ -1556,12 +1547,9 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
// inside annotations.
applyRefchecksToAnnotations(tree)
var result: Tree = tree match {
- case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(NativeAttr) =>
- tree.symbol.resetFlag(DEFERRED)
- transform(treeCopy.DefDef(
- tree, mods, name, tparams, vparams, tpt,
- typed(gen.mkSysErrorCall("native method stub"))
- ))
+ case DefDef(_, _, _, _, _, EmptyTree) if sym hasAnnotation NativeAttr =>
+ sym resetFlag DEFERRED
+ transform(deriveDefDef(tree)(_ => typed(gen.mkSysErrorCall("native method stub"))))
case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
checkDeprecatedOvers(tree)
@@ -1580,9 +1568,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
checkAnyValSubclass(currentOwner)
-
- if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
- else tree
+ if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
case dc@TypeTreeWithDeferredRefCheck() => assert(false, "adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc"); dc
case tpt@TypeTree() =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 0ab09b4fec..243e685b13 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -24,7 +24,7 @@ import symtab.Flags._
*/
abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers {
import global._
- import definitions.{ UnitClass, isRepeatedParamType, isByNameParamType, Any_asInstanceOf }
+ import definitions.{ UnitClass, ObjectClass, isRepeatedParamType, isByNameParamType, Any_asInstanceOf }
import analyzer.{ restrictionError }
/** the following two members override abstract members in Transform */
@@ -34,6 +34,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
new SuperAccTransformer(unit)
class SuperAccTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ /** validCurrentOwner arrives undocumented, but I reverse engineer it to be
+ * a flag for needsProtectedAccessor which is false while transforming either
+ * a by-name argument block or a closure. This excludes them from being
+ * considered able to access protected members via subclassing (why?) which in turn
+ * increases the frequency with which needsProtectedAccessor will be true.
+ */
private var validCurrentOwner = true
private val accDefs = mutable.Map[Symbol, ListBuffer[Tree]]()
@@ -41,6 +47,25 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val buf = accDefs.getOrElse(clazz, sys.error("no acc def buf for "+clazz))
buf += typers(clazz) typed tree
}
+ private def ensureAccessor(sel: Select) = {
+ val Select(qual, name) = sel
+ val sym = sel.symbol
+ val clazz = qual.symbol
+ val supername = nme.superName(name)
+ val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse {
+ debuglog("add super acc " + sym + sym.locationString + " to `" + clazz);//debug
+ val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE) setAlias sym
+ val tpe = clazz.thisType memberType sym match {
+ case t if sym.isModule && !sym.isMethod => NullaryMethodType(t)
+ case t => t
+ }
+ acc setInfoAndEnter (tpe cloneInfo acc)
+ storeAccessorDefinition(clazz, DefDef(acc, EmptyTree))
+ acc
+ }
+
+ atPos(sel.pos)(Select(gen.mkAttributedThis(clazz), superAcc) setType sel.tpe)
+ }
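
For context, a hedged sketch of the situation ensureAccessor handles: a super call cannot always be bound statically at the call site, so a SUPERACCESSOR (conventionally super$name) is entered into the class's decls and the selection is rerouted through it.

    trait Logging {
      def log(msg: String): Unit = println(msg)
    }
    trait Timestamped extends Logging {
      // super.log from a trait cannot be a static call, so conceptually
      // the compiler synthesizes and targets:
      //   private def super$log(msg: String): Unit   // alias of Logging.log
      override def log(msg: String): Unit = super.log("[ts] " + msg)
    }
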
private def transformArgs(params: List[Symbol], args: List[Tree]) = {
treeInfo.mapMethodParamsAndArgs(params, args) { (param, arg) =>
@@ -88,42 +113,21 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
- private def transformSuperSelect(tree: Tree): Tree = tree match {
- case Select(sup @ Super(_, mix), name) =>
- val sym = tree.symbol
- val clazz = sup.symbol
-
- if (sym.isDeferred) {
- val member = sym.overridingSymbol(clazz);
- if (mix != tpnme.EMPTY || member == NoSymbol ||
- !((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz)))
- unit.error(tree.pos, ""+sym+sym.locationString+" is accessed from super. It may not be abstract "+
- "unless it is overridden by a member declared `abstract' and `override'");
- }
- if (tree.isTerm && mix == tpnme.EMPTY &&
- (clazz.isTrait || clazz != currentOwner.enclClass || !validCurrentOwner)) {
- val supername = nme.superName(sym.name)
- var superAcc = clazz.info.decl(supername).suchThat(_.alias == sym)
- if (superAcc == NoSymbol) {
- debuglog("add super acc " + sym + sym.locationString + " to `" + clazz);//debug
- superAcc = clazz.newMethod(supername, tree.pos, SUPERACCESSOR | PRIVATE) setAlias sym
- var superAccTpe = clazz.thisType.memberType(sym)
- if (sym.isModule && !sym.isMethod) {
- // the super accessor always needs to be a method. See #231
- superAccTpe = NullaryMethodType(superAccTpe)
- }
- superAcc setInfoAndEnter (superAccTpe cloneInfo superAcc)
- storeAccessorDefinition(clazz, DefDef(superAcc, EmptyTree))
- }
- atPos(sup.pos) {
- Select(gen.mkAttributedThis(clazz), superAcc) setType tree.tpe;
- }
- } else {
- tree
- }
- case _ =>
- assert(tree.tpe.isError, tree)
- tree
+ private def transformSuperSelect(sel: Select): Tree = {
+ val Select(sup @ Super(_, mix), name) = sel
+ val sym = sel.symbol
+ val clazz = sup.symbol
+
+ if (sym.isDeferred) {
+ val member = sym.overridingSymbol(clazz);
+ if (mix != tpnme.EMPTY || member == NoSymbol ||
+ !((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz)))
+ unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
+ "unless it is overridden by a member declared `abstract' and `override'");
+ }
+ if (name.isTermName && mix == tpnme.EMPTY && (clazz.isTrait || clazz != currentClass || !validCurrentOwner))
+ ensureAccessor(sel)
+ else sel
}
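
An assumed example of the error path above: a deferred member may be the target of super only under the stackable-modification pattern.

    trait Sink { def put(x: Int): Unit }   // deferred

    trait Counting extends Sink {
      // error: ... is accessed from super. It may not be abstract
      //   unless it is overridden by a member declared `abstract' and `override'
      // override def put(x: Int): Unit = super.put(x)

      // accepted form:
      abstract override def put(x: Int): Unit = super.put(x)
    }
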
// Disallow some super.XX calls targeting Any methods which would
@@ -156,9 +160,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
for (s <- decls) {
if (s.privateWithin.isClass && !s.isProtected && !s.privateWithin.isModuleClass &&
!s.hasFlag(EXPANDEDNAME) && !s.isConstructor) {
+ val savedName = s.name
decls.unlink(s)
s.expandName(s.privateWithin)
decls.enter(s)
+ log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym))
}
}
if (settings.verbose.value && forScaladoc && !sym.isAnonymousClass) {
@@ -177,7 +183,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case ModuleDef(_, _, _) =>
checkCompanionNameClashes(sym)
super.transform(tree)
- case Template(parents, self, body) =>
+ case Template(_, _, body) =>
val ownAccDefs = new ListBuffer[Tree]
accDefs(currentOwner) = ownAccDefs
@@ -189,7 +195,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val body1 = atOwner(currentOwner)(transformTrees(body))
accDefs -= currentOwner
ownAccDefs ++= body1
- treeCopy.Template(tree, parents, self, ownAccDefs.toList)
+ deriveTemplate(tree)(_ => ownAccDefs.toList)
case TypeApply(sel @ Select(This(_), name), args) =>
mayNeedProtectedAccessor(sel, args, false)
@@ -218,24 +224,47 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// direct calls to aliases of param accessors to the superclass in order to avoid
// duplicating fields.
if (sym.isParamAccessor && sym.alias != NoSymbol) {
- val result = localTyper.typed {
- Select(
- Super(qual, tpnme.EMPTY/*qual.symbol.info.parents.head.symbol.name*/) setPos qual.pos,
- sym.alias) setPos tree.pos
- }
+ val result = (localTyper.typedPos(tree.pos) {
+ Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
+ }).asInstanceOf[Select]
debuglog("alias replacement: " + tree + " ==> " + result);//debug
localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
}
- else mayNeedProtectedAccessor(sel, List(EmptyTree), false)
+ else {
+ /** A trait which extends a class and accesses a protected member
+ * of that class cannot implement the necessary accessor method
+ * because its implementation is in an implementation class (e.g.
+       * Foo$class) which inherits nothing, and JVM access restrictions
+ * require the call site to be in an actual subclass. So non-trait
+ * classes inspect their ancestors for any such situations and
+ * generate the accessors. See SI-2296.
+ */
+ // FIXME - this should be unified with needsProtectedAccessor, but some
+ // subtlety which presently eludes me is foiling my attempts.
+ val shouldEnsureAccessor = (
+ currentClass.isTrait
+ && sym.isProtected
+ && sym.enclClass != currentClass
+ && !sym.owner.isTrait
+ && (sym.owner.enclosingPackageClass != currentPackage)
+ && (qual.symbol.info.member(sym.name) ne NoSymbol)
+ )
+ if (shouldEnsureAccessor) {
+ log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
+ ensureAccessor(sel)
+ }
+ else
+ mayNeedProtectedAccessor(sel, List(EmptyTree), false)
+ }
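
A sketch of the SI-2296 shape the comment above describes; jpkg and JBase are hypothetical stand-ins for a Java class in another package.

    // Java: package jpkg; public class JBase { protected int width() { return 0; } }

    trait Measured extends jpkg.JBase {
      // this body lands in Measured$class, which is not a JBase subclass,
      // so a direct protected access would violate JVM rules; an accessor
      // is ensured instead (see shouldEnsureAccessor above).
      def twice: Int = width() * 2
    }
    class Window extends jpkg.JBase with Measured
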
- case Select(Super(_, mix), name) =>
+ case sel @ Select(Super(_, mix), name) =>
if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
unit.error(tree.pos, "super may be not be used on "+ sym.accessedOrSelf)
}
else if (isDisallowed(sym)) {
unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
}
- transformSuperSelect(tree)
+ transformSuperSelect(sel)
case TypeApply(sel @ Select(qual, name), args) =>
mayNeedProtectedAccessor(sel, args, true)
@@ -280,11 +309,10 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
private def withInvalidOwner[A](trans: => A): A = {
- val prevValidCurrentOwner = validCurrentOwner
+ val saved = validCurrentOwner
validCurrentOwner = false
- val result = trans
- validCurrentOwner = prevValidCurrentOwner
- result
+ try trans
+ finally validCurrentOwner = saved
}
/** Add a protected accessor, if needed, and return a tree that calls
@@ -294,7 +322,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
private def makeAccessor(tree: Select, targs: List[Tree]): Tree = {
val Select(qual, name) = tree
val sym = tree.symbol
- val clazz = hostForAccessorOf(sym, currentOwner.enclClass)
+ val clazz = hostForAccessorOf(sym, currentClass)
assert(clazz != NoSymbol, sym)
debuglog("Decided for host class: " + clazz)
@@ -334,7 +362,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
val selection = Select(This(clazz), protAcc)
def mkApply(fn: Tree) = Apply(fn, qual :: Nil)
- val res = atPos(tree.pos) {
+ val res = atPos(tree.pos) {
targs.head match {
case EmptyTree => mkApply(selection)
case _ => mkApply(TypeApply(selection, targs))
@@ -373,27 +401,27 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
*/
private def makeSetter(tree: Select): Tree = {
val field = tree.symbol
- val clazz = hostForAccessorOf(field, currentOwner.enclClass)
+ val clazz = hostForAccessorOf(field, currentClass)
assert(clazz != NoSymbol, field)
debuglog("Decided for host class: " + clazz)
-
+
val accName = nme.protSetterName(field.originalName)
val protectedAccessor = clazz.info decl accName orElse {
val protAcc = clazz.newMethod(accName, field.pos)
val paramTypes = List(clazz.typeOfThis, field.tpe)
val params = protAcc newSyntheticValueParams paramTypes
val accessorType = MethodType(params, UnitClass.tpe)
-
+
protAcc setInfoAndEnter accessorType
val obj :: value :: Nil = params
storeAccessorDefinition(clazz, DefDef(protAcc, Assign(Select(Ident(obj), field.name), Ident(value))))
-
+
protAcc
}
atPos(tree.pos)(Select(This(clazz), protectedAccessor))
}
- /** Does `sym` need an accessor when accessed from `currentOwner`?
+ /** Does `sym` need an accessor when accessed from `currentClass`?
* A special case arises for classes with explicit self-types. If the
* self type is a Java class, and a protected accessor is needed, we issue
* an error. If the self type is a Scala class, we don't add an accessor.
@@ -407,23 +435,20 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
* classes, this has to be signaled as error.
*/
private def needsProtectedAccessor(sym: Symbol, pos: Position): Boolean = {
- val clazz = currentOwner.enclClass
+ val clazz = currentClass
def accessibleThroughSubclassing =
validCurrentOwner && clazz.thisSym.isSubClass(sym.owner) && !clazz.isTrait
- def packageAccessBoundry(sym: Symbol) = {
- val b = sym.accessBoundary(sym.owner)
- if (b.isPackageClass) b
- else b.enclosingPackageClass
- }
+ def packageAccessBoundry(sym: Symbol) =
+ sym.accessBoundary(sym.enclosingPackageClass)
val isCandidate = (
sym.isProtected
&& sym.isJavaDefined
&& !sym.isDefinedInPackage
&& !accessibleThroughSubclassing
- && (sym.owner.enclosingPackageClass != currentOwner.enclosingPackageClass)
- && (sym.owner.enclosingPackageClass == packageAccessBoundry(sym))
+ && (sym.enclosingPackageClass != currentPackage)
+ && (sym.enclosingPackageClass == sym.accessBoundary(sym.enclosingPackageClass))
)
val host = hostForAccessorOf(sym, clazz)
def isSelfType = !(host.tpe <:< host.typeOfThis) && {
@@ -433,15 +458,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
)
true
}
- def isJavaProtected = host.isTrait && sym.isJavaDefined && {
- restrictionError(pos, unit,
- """|%s accesses protected %s inside a concrete trait method.
- |Add an accessor in a class extending %s as a workaround.""".stripMargin.format(
- clazz, sym, sym.enclClass)
- )
- true
- }
- isCandidate && !host.isPackageClass && !isSelfType && !isJavaProtected
+ isCandidate && !host.isPackageClass && !isSelfType
}
/** Return the innermost enclosing class C of referencingClass for which either
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 3ee5bf601d..def6475221 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -126,8 +126,8 @@ trait SyntheticMethods extends ast.TreeDSL {
* def canEqual(that: Any) = that.isInstanceOf[This]
*/
def canEqualMethod: Tree = (
- createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
- Ident(m.firstParam) IS_OBJ typeCaseType(clazz))
+ createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
+ Ident(m.firstParam) IS_OBJ classExistentialType(clazz))
)
/** The equality method for case classes.
@@ -143,7 +143,7 @@ trait SyntheticMethods extends ast.TreeDSL {
*/
def equalsClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
val arg0 = Ident(m.firstParam)
- val thatTest = gen.mkIsInstanceOf(arg0, typeCaseType(clazz), true, false)
+ val thatTest = gen.mkIsInstanceOf(arg0, classExistentialType(clazz), true, false)
val thatCast = gen.mkCast(arg0, clazz.tpe)
def argsBody: Tree = {
@@ -259,11 +259,11 @@ trait SyntheticMethods extends ast.TreeDSL {
}
if (phase.id > currentRun.typerPhase.id) templ
- else treeCopy.Template(templ, templ.parents, templ.self,
+ else deriveTemplate(templ)(body =>
if (clazz.isCase) caseTemplateBody()
else synthesize() match {
- case Nil => templ.body // avoiding unnecessary copy
- case ms => templ.body ++ ms
+ case Nil => body // avoiding unnecessary copy
+ case ms => body ++ ms
}
)
}
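
The switch from typeCaseType to classExistentialType is visible in the synthesized case-class members; roughly, for a parameterized case class:

    case class Box[A](value: A)
    // synthesized, conceptually:
    //   def canEqual(that: Any): Boolean = that.isInstanceOf[Box[_]]
    // Box[_] being the class existential type: A is unknowable at an
    // Any-typed call site, so only the class itself is tested.
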
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 4f4087a953..1434002121 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -415,7 +415,7 @@ trait TypeDiagnostics {
"\nIf applicable, you may wish to try moving some members into another object."
)
}
-
+
/** Report a type error.
*
* @param pos0 The position where to report the error
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 66330d4321..24ec0c8028 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -604,6 +604,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* 1. Check that non-function pattern expressions are stable
* 2. Check that packages and static modules are not used as values
* 3. Turn tree type into stable type if possible and required by context.
+ * 4. Give getClass calls a more precise type based on the type of the target of the call.
*/
private def stabilize(tree: Tree, pre: Type, mode: Int, pt: Type): Tree = {
if (tree.symbol.isOverloaded && !inFunMode(mode))
@@ -614,7 +615,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (tree.isErrorTyped) tree
else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1)
- if (sym.isValue) checkStable(tree)
+ if (sym.isValue) {
+ val tree1 = checkStable(tree)
+ // A module reference in a pattern has type Foo.type, not "object Foo"
+ if (sym.isModule && !sym.isMethod) tree1 setType singleType(pre, sym)
+ else tree1
+ }
else fail()
} else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && !sym.isValue && !phase.erasedTypes) { // (2)
fail()
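
A small assumed example of the new rule that a module reference in a pattern gets the singleton type:

    object Red
    def isRed(x: Any): Boolean = x match {
      case Red => true   // the pattern is now typed Red.type, not "object Red"
      case _   => false
    }
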
@@ -622,7 +628,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (sym.isStable && pre.isStable && !isByNameParamType(tree.tpe) &&
(isStableContext(tree, mode, pt) || sym.isModule && !sym.isMethod))
tree.setType(singleType(pre, sym))
- else tree
+ // To fully benefit from special casing the return type of
+ // getClass, we have to catch it immediately so expressions
+ // like x.getClass().newInstance() are typed with the type of x.
+ else if ( tree.symbol.name == nme.getClass_
+ && tree.tpe.params.isEmpty
+ // TODO: If the type of the qualifier is inaccessible, we can cause private types
+ // to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
+ // so for now it requires the type symbol be public.
+ && pre.typeSymbol.isPublic)
+ tree setType MethodType(Nil, erasure.getClassReturnType(pre))
+ else
+ tree
}
}
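
The effect of the relocated getClass special case, sketched; the types in the comments state the intent of the change rather than quoting compiler output.

    object GetClassDemo {
      val s: String = "abc"
      val c = s.getClass   // typed Class[_ <: String] rather than Class[_]
      // which is what keeps chained expressions precise, e.g.
      //   s.getClass().newInstance()   // : _ <: String
    }
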
@@ -708,8 +725,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
- def isCodeType(tpe: Type) = tpe.typeSymbol isNonBottomSubClass CodeClass
-
/** Perform the following adaptations of expression, pattern or type `tree` wrt to
* given mode `mode` and given prototype `pt`:
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
@@ -849,6 +864,33 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
+ /**
+ * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
+ * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
+ *
+ * Consider the following example:
+ *
+ * class AbsWrapperCov[+A]
+ * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
+ *
+ * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
+ * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
+ * wrapped // : Wrapped[_ <: T]
+ * }
+ *
+ * this method should type check if and only if Wrapped is covariant in its type parameter
+ *
+ * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
+ * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
+ * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
+ *
+ * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
+ * we can simply replace skolems that represent method type parameters as seen from the method's body
+ * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
+ * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
+ *
+ * see test/files/../t5189*.scala
+ */
def adaptConstrPattern(): Tree = { // (5)
val extractor = tree.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe)))
if (extractor != NoSymbol) {
@@ -862,7 +904,32 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
.setOriginal(tree)
- inferConstructorInstance(tree1, clazz.typeParams, pt)
+ val skolems = new mutable.ListBuffer[TypeSymbol]
+ object variantToSkolem extends VariantTypeMap {
+ def apply(tp: Type) = mapOver(tp) match {
+ case TypeRef(NoPrefix, tpSym, Nil) if variance != 0 && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
+ val bounds = if (variance == 1) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
+ val skolem = context.owner.newExistentialSkolem(tpSym, tpSym, unit.freshTypeName("?"+tpSym.name), bounds)
+ // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt)
+ skolems += skolem
+ skolem.tpe
+ case tp1 => tp1
+ }
+ }
+
+ // have to open up the existential and put the skolems in scope
+ // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
+ val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
+ val freeVars = skolems.toList
+
+ // use "tree" for the context, not context.tree: don't make another CaseDef context,
+ // as instantiateTypeVar's bounds would end up there
+ val ctorContext = context.makeNewScope(tree, context.owner)
+ freeVars foreach ctorContext.scope.enter
+ newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
+
+ // tree1's type-slack skolems will be deskolemized (to the method type parameter skolems)
+ // once the containing CaseDef has been type checked (see typedCase)
tree1
} else {
tree
@@ -943,10 +1010,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (tree.isType)
adaptType()
- else if (inExprModeButNot(mode, FUNmode) && tree.symbol != null && tree.symbol.isMacro && !tree.isDef) {
- val tree1 = expandMacro(tree)
- if (tree1.isErroneous) tree1 else typed(tree1, mode, pt)
- } else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
+ else if (inExprModeButNot(mode, FUNmode) && tree.symbol != null && tree.symbol.isMacro && !tree.isDef && !(tree exists (_.isErroneous)))
+ macroExpand(tree, this) match {
+ case Some(expanded: Tree) =>
+ typed(expanded, mode, pt)
+ case None =>
+ setError(tree) // error already reported
+ }
+ else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
adaptConstrPattern()
else if (inAllModes(mode, EXPRmode | FUNmode) &&
!tree.tpe.isInstanceOf[MethodType] &&
@@ -1410,7 +1481,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def typedClassDef(cdef: ClassDef): Tree = {
// attributes(cdef)
val clazz = cdef.symbol
- val typedMods = removeAnnotations(cdef.mods)
+ val typedMods = typedModifiers(cdef.mods)
assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
@@ -1451,7 +1522,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
linkedClass.info.decl(nme.CONSTRUCTOR).alternatives foreach (_.initialize)
val clazz = mdef.symbol.moduleClass
- val typedMods = removeAnnotations(mdef.mods)
+ val typedMods = typedModifiers(mdef.mods)
assert(clazz != NoSymbol, mdef)
val impl1 = typerReportAnyContextErrors(context.make(mdef.impl, clazz, newScope)) {
_.typedTemplate(mdef.impl, {
@@ -1509,12 +1580,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (templ.symbol == NoSymbol)
templ setSymbol clazz.newLocalDummy(templ.pos)
val self1 = templ.self match {
- case vd @ ValDef(mods, name, tpt, EmptyTree) =>
+ case vd @ ValDef(_, _, tpt, EmptyTree) =>
val tpt1 = checkNoEscaping.privates(
clazz.thisSym,
treeCopy.TypeTree(tpt).setOriginal(tpt) setType vd.symbol.tpe
)
- treeCopy.ValDef(vd, mods, name, tpt1, EmptyTree) setType NoType
+ copyValDef(vd)(tpt = tpt1, rhs = EmptyTree) setType NoType
}
// was:
// val tpt1 = checkNoEscaping.privates(clazz.thisSym, typedType(tpt))
@@ -1551,8 +1622,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
/** Remove definition annotations from modifiers (they have been saved
* into the symbol's ``annotations'' in the type completer / namer)
+ *
+ * However reification does need annotation definitions to proceed.
+ * Unfortunately, AnnotationInfo doesn't provide enough info to reify it in general case.
+ * The biggest problem is with the "atp: Type" field, which cannot be reified in some situations
+ * that involve locally defined annotations. See more about that in Reifiers.scala.
+ *
+ * That's why the original tree gets saved into ``original'' field of AnnotationInfo (happens elsewhere).
+ * The field doesn't get pickled/unpickled and exists only during a single compilation run.
+ * This simultaneously allows us to reify annotations and to preserve backward compatibility.
*/
- def removeAnnotations(mods: Modifiers): Modifiers =
+ def typedModifiers(mods: Modifiers): Modifiers =
mods.copy(annotations = Nil) setPositions mods.positions
/**
@@ -1563,7 +1643,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// attributes(vdef)
val sym = vdef.symbol.initialize
val typer1 = constrTyperIf(sym.isParameter && sym.owner.isConstructor)
- val typedMods = removeAnnotations(vdef.mods)
+ val typedMods = typedModifiers(vdef.mods)
// complete lazy annotations
val annots = sym.annotations
@@ -1774,7 +1854,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
checkNonCyclic(ddef, tpt1)
ddef.tpt.setType(tpt1.tpe)
- val typedMods = removeAnnotations(ddef.mods)
+ val typedMods = typedModifiers(ddef.mods)
var rhs1 =
if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors
if (!meth.isPrimaryConstructor &&
@@ -1827,7 +1907,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
tdef.symbol.initialize
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
- val typedMods = removeAnnotations(tdef.mods)
+ val typedMods = typedModifiers(tdef.mods)
// complete lazy annotations
val annots = tdef.symbol.annotations
@@ -1862,8 +1942,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val restpe = ldef.symbol.tpe.resultType
val rhs1 = typed(ldef.rhs, restpe)
ldef.params foreach (param => param.tpe = param.symbol.tpe)
- treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
- } else {
+ deriveLabelDef(ldef)(_ => rhs1) setType restpe
+ }
+ else {
val initpe = ldef.symbol.tpe.resultType
val rhs1 = typed(ldef.rhs)
val restpe = rhs1.tpe
@@ -1876,7 +1957,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
ldef.params foreach (param => param.tpe = param.symbol.tpe)
- treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs2) setSymbol sym2 setType restpe
+ deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe
}
}
}
@@ -1981,18 +2062,38 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
else typed(cdef.guard, BooleanClass.tpe)
var body1: Tree = typed(cdef.body, pt)
- if (!context.savedTypeBounds.isEmpty) {
- body1.tpe = context.restoreTypeBounds(body1.tpe)
- if (isFullyDefined(pt) && !(body1.tpe <:< pt)) {
- // @M no need for pt.normalize here, is done in erasure
+
+ val contextWithTypeBounds = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
+ if (contextWithTypeBounds.savedTypeBounds nonEmpty) {
+ body1.tpe = contextWithTypeBounds restoreTypeBounds body1.tpe
+
+ // insert a cast if something typechecked under the GADT constraints,
+      // but not in real life (i.e., now that we've reset the method's type skolems'
+ // infos back to their pre-GADT-constraint state)
+ if (isFullyDefined(pt) && !(body1.tpe <:< pt))
body1 = typedPos(body1.pos)(gen.mkCast(body1, pt))
- }
+
}
+
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
- treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
+ val treeWithSkolems = treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
+
+ // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
+ // TODO: Paul, can we do the deskolemization lazily in the old pattern matcher
+ object deskolemizeOnce extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case TypeRef(pre, sym, args) if sym.isExistentialSkolem && sym.deSkolemize.isSkolem && sym.deSkolemize.owner.isTerm =>
+ typeRef(NoPrefix, sym.deSkolemize, args)
+ case tp1 => tp1
+ }
+ }
+
+ new TypeMapTreeSubstituter(deskolemizeOnce).traverse(treeWithSkolems)
+
+ treeWithSkolems // now without skolems, actually
}
- def typedCases(tree: Tree, cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
+ def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
cases mapConserve { cdef =>
newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
}
@@ -2005,8 +2106,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
*/
def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
val numVparams = fun.vparams.length
- val codeExpected = !forMSIL && (pt.typeSymbol isNonBottomSubClass CodeClass)
-
if (numVparams > definitions.MaxFunctionArity)
return MaxFunctionArityError(fun)
@@ -2023,7 +2122,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
else
(FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
- val (clazz, argpts, respt) = decompose(if (codeExpected) pt.normalize.typeArgs.head else pt)
+ val (clazz, argpts, respt) = decompose(pt)
if (argpts.lengthCompare(numVparams) != 0)
WrongNumberOfParametersError(fun, argpts)
else {
@@ -2033,7 +2132,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (isFullyDefined(argpt)) argpt
else {
fun match {
- case etaExpansion(vparams, fn, args) if !codeExpected =>
+ case etaExpansion(vparams, fn, args) =>
silent(_.typed(fn, forFunMode(mode), pt)) match {
case SilentResultValue(fn1) if context.undetparams.isEmpty =>
// if context,undetparams is not empty, the function was polymorphic,
@@ -2065,13 +2164,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val restpe = packedType(body1, fun.symbol).deconst.resultType
val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
// body = checkNoEscaping.locals(context.scope, restpe, body)
- val fun1 = treeCopy.Function(fun, vparams, body1).setType(funtpe)
- if (codeExpected) lifted(fun1) else fun1
- }
+ treeCopy.Function(fun, vparams, body1).setType(funtpe)
}
-
- def lifted(tree: Tree): Tree = typedPos(tree.pos) {
- Apply(Select(Ident(CodeModule), nme.lift_), List(tree))
}
def typedRefinement(stats: List[Tree]) {
@@ -2181,6 +2275,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// error for this is issued in RefChecks.checkDefaultsInOverloaded
if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefaultFlag &&
!e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) {
+ log("Double definition detected:\n " +
+ ((e.sym.getClass, e.sym.info, e.sym.ownerChain)) + "\n " +
+ ((e1.sym.getClass, e1.sym.info, e1.sym.ownerChain)))
+
DefDefinedTwiceError(e.sym, e1.sym)
scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
}
@@ -2794,7 +2892,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (hasError) annotationError
- else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setPos(ann.pos)
+ else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(ann).setPos(ann.pos)
}
} else if (requireJava) {
reportAnnotationError(NestedAnnotationError(ann, annType))
@@ -2834,7 +2932,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def annInfo(t: Tree): AnnotationInfo = t match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
- AnnotationInfo(annType, args, List()).setPos(t.pos)
+ AnnotationInfo(annType, args, List()).setOriginal(ann).setPos(t.pos)
case Block(stats, expr) =>
context.warning(t.pos, "Usage of named or default arguments transformed this annotation\n"+
@@ -2869,6 +2967,33 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
sym.isTypeParameter && sym.owner.isJavaDefined
+ /** If we map a set of hidden symbols to their existential bounds, we
+ * have a problem: the bounds may themselves contain references to the
+ * hidden symbols. So this recursively calls existentialBound until
+ * the typeSymbol is not amongst the symbols being hidden.
+ */
+ def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
+ def safeBound(t: Type): Type =
+ if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
+
+ def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
+ case tp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve safeBound
+ if (parents eq parents1) tp
+ else copyRefinedType(tp, parents1, decls)
+ case tp => tp
+ }
+
+ (hidden map { s =>
+ // Hanging onto lower bound in case anything interesting
+ // happens with it.
+ (s, s.existentialBound match {
+ case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
+ case _ => hiBound(s)
+ })
+ }).toMap
+ }
+
/** Given a set `rawSyms` of term- and type-symbols, and a type
* `tp`, produce a set of fresh type parameters and a type so that
* it can be abstracted to an existential type. Every type symbol
@@ -2886,12 +3011,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* only the type of the Ident is changed.
*/
protected def existentialTransform[T](rawSyms: List[Symbol], tp: Type)(creator: (List[Symbol], Type) => T): T = {
+ val allBounds = existentialBoundsExcludingHidden(rawSyms)
val typeParams: List[Symbol] = rawSyms map { sym =>
val name = sym.name match {
case x: TypeName => x
- case x => newTypeName(x + ".type")
+ case x => nme.singletonName(x)
}
- val bound = sym.existentialBound
+ val bound = allBounds(sym)
val sowner = if (isRawParameter(sym)) context.owner else sym.owner
val quantified = sowner.newExistential(name, sym.pos)
@@ -3015,40 +3141,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
packSymbols(localSyms.toList, normalizedTpe)
}
- /** Replace type parameters with their TypeSkolems, which can later
- * be deskolemized to the original type param. (A skolem is a
- * representation of a bound variable when viewed inside its scope)
- * !!!Adriaan: this does not work for hk types.
- */
- def skolemizeTypeParams(tparams: List[TypeDef]): List[TypeDef] = {
- class Deskolemizer extends LazyType {
- override val typeParams = tparams map (_.symbol)
- val typeSkolems = typeParams map (_.newTypeSkolem setInfo this)
- // Replace the symbols
- def substitute() = map2(tparams, typeSkolems)(_ setSymbol _)
- override def complete(sym: Symbol) {
- // The info of a skolem is the skolemized info of the
- // actual type parameter of the skolem
- sym setInfo sym.deSkolemize.info.substSym(typeParams, typeSkolems)
- }
- }
- (new Deskolemizer).substitute()
- }
- /** Convert to corresponding type parameters all skolems of method
- * parameters which appear in `tparams`.
- */
- def deskolemizeTypeParams(tparams: List[Symbol])(tp: Type): Type = {
- class DeSkolemizeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) if sym.isTypeSkolem && (tparams contains sym.deSkolemize) =>
- mapOver(typeRef(NoPrefix, sym.deSkolemize, args))
- case _ =>
- mapOver(tp)
- }
- }
- new DeSkolemizeMap mapOver tp
- }
-
def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) =
if (!checkClassType(tpt, true, false) && noGen) tpt
else atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
@@ -3305,7 +3397,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
} else {
val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
- var cases1 = typedCases(tree, cases, packCaptured(selector1.tpe.widen), pt)
+ var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt)
if (isPastTyper || !opt.virtPatmat) {
val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
@@ -3321,7 +3413,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
(MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match {
case Block(vd :: Nil, tree@Match(selector, cases)) =>
val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
- var cases1 = typedCases(tree, cases, packCaptured(selector1.tpe.widen), pt)
+ var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt)
val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
if (needAdapt)
cases1 = cases1 map (adaptCase(_, owntype))
@@ -3657,8 +3749,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (ps.isEmpty)
ps = site.parents filter (_.typeSymbol.toInterface.name == mix)
if (ps.isEmpty) {
- if (settings.debug.value)
- Console.println(site.parents map (_.typeSymbol.name))//debug
+ debuglog("Fatal: couldn't find site " + site + " in " + site.parents.map(_.typeSymbol.name))
if (phase.erasedTypes && context.enclClass.owner.isImplClass) {
// println(qual1)
// println(clazz)
@@ -3733,7 +3824,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
if (!reallyExists(sym)) {
- if (context.owner.toplevelClass.isJavaDefined && name.isTypeName) {
+ if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
}
@@ -3805,7 +3896,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (settings.warnSelectNullable.value && isPotentialNullDeference && unit != null)
unit.warning(tree.pos, "potential null pointer dereference: "+tree)
- val selection = result match {
+ result match {
// could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs nonEmpty => // TODO: somehow the new qual is not checked in refchecks
treeCopy.SelectFromTypeTree(
@@ -3827,22 +3918,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case _ =>
result
}
- // To fully benefit from special casing the return type of
- // getClass, we have to catch it immediately so expressions
- // like x.getClass().newInstance() are typed with the type of x.
- val isRefinableGetClass = (
- !selection.isErrorTyped
- && selection.symbol.name == nme.getClass_
- && selection.tpe.params.isEmpty
- // TODO: If the type of the qualifier is inaccessible, we can cause private types
- // to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
- // so for now it requires the type symbol be public.
- && qual.tpe.typeSymbol.isPublic
- )
- if (isRefinableGetClass)
- selection setType MethodType(Nil, getClassReturnType(qual.tpe))
- else
- selection
}
}
@@ -4120,7 +4195,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
def adaptCase(cdef: CaseDef, tpe: Type): CaseDef =
- treeCopy.CaseDef(cdef, cdef.pat, cdef.guard, adapt(cdef.body, mode, tpe))
+ deriveCaseDef(cdef)(adapt(_, mode, tpe))
// begin typed1
val sym: Symbol = tree.symbol
@@ -4224,7 +4299,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case Try(block, catches, finalizer) =>
var block1 = typed(block, pt)
- var catches1 = typedCases(tree, catches, ThrowableClass.tpe, pt)
+ var catches1 = typedCases(catches, ThrowableClass.tpe, pt)
val finalizer1 = if (finalizer.isEmpty) finalizer
else typed(finalizer, UnitClass.tpe)
val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
@@ -4232,6 +4307,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
block1 = adapt(block1, mode, owntype)
catches1 = catches1 map (adaptCase(_, owntype))
}
+
+ if(!isPastTyper && opt.virtPatmat) {
+ catches1 = (MatchTranslator(this)).translateTry(catches1, owntype, tree.pos)
+ }
+
treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
case Throw(expr) =>
@@ -4518,13 +4598,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
- def expandMacro(tree: Tree): Tree =
- macroExpand(tree, context) match {
- case Some(t: Tree) => t
- case Some(t) => MacroExpandError(tree, t)
- case None => setError(tree) // error already reported
- }
-
def atOwner(owner: Symbol): Typer =
newTyper(context.make(context.tree, owner))
@@ -4641,7 +4714,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case None => typed(tree, mode, pt)
}
- def findManifest(tp: Type, full: Boolean) = atPhase(currentRun.typerPhase) {
+ def findManifest(tp: Type, full: Boolean) = beforeTyper {
inferImplicit(
EmptyTree,
appliedType((if (full) FullManifestClass else PartialManifestClass).typeConstructor, List(tp)),
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index b8bc0946c1..cc272b7b8d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -22,7 +22,7 @@ trait Unapplies extends ast.TreeDSL
import treeInfo.{ isRepeatedParamType, isByNameParamType }
private val unapplyParamName = nme.x_0
-
+
/** returns type list for return type of the extraction */
def unapplyTypeList(ufn: Symbol, ufntpe: Type) = {
assert(ufn.isMethod, ufn)
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index 1db6c38b4d..fbe92e5d84 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -71,13 +71,35 @@ object DocStrings {
* Every section starts with a `@` and extends to the next `@`, or
* to the end of the comment string, but excluding the final two
* characters which terminate the comment.
+ *
+ * Also take usecases into account - they need to expand until the next
+ * usecase or the end of the string, as they might include other sections
+ * of their own
*/
def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] =
findAll(str, 0) (idx => str(idx) == '@' && p(idx)) match {
case List() => List()
- case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
+ case idxs => {
+ val idxs2 = mergeUsecaseSections(str, idxs)
+ idxs2 zip (idxs2.tail ::: List(str.length - 2))
+ }
}
+ /**
+   * Merge sections following a usecase into the usecase comment, so they
+ * can override the parent symbol's sections
+ */
+ def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
+ idxs.find(str.substring(_).startsWith("@usecase")) match {
+ case Some(firstUC) =>
+ val commentSections = idxs.take(idxs.indexOf(firstUC))
+ val usecaseSections = idxs.drop(idxs.indexOf(firstUC)).filter(str.substring(_).startsWith("@usecase"))
+ commentSections ::: usecaseSections
+ case None =>
+ idxs
+ }
+ }
+
/** Does interval `iv` start with given `tag`?
*/
def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
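
A sketch of the comment shape this handles: everything from a @usecase tag up to the next @usecase (or the end of the comment) is folded into that usecase's section, so its own tags can override the parent symbol's.

    /** Adds an element.
     *
     *  @param elem the element to add
     *  @usecase def +:(elem: A): List[A]
     *    @param elem the element prepended to this list
     *  @usecase def :+(elem: A): List[A]
     *    @param elem the element appended to this list
     */
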
diff --git a/src/compiler/scala/tools/nsc/util/ProxyReport.scala b/src/compiler/scala/tools/nsc/util/ProxyReport.scala
index 2f4f029308..4fc86c3a32 100644
--- a/src/compiler/scala/tools/nsc/util/ProxyReport.scala
+++ b/src/compiler/scala/tools/nsc/util/ProxyReport.scala
@@ -141,6 +141,6 @@ object ProxyReportRunner {
s.processArguments(args.toList.tail, true)
val g = new ProxyGlobal(s)
val run = new g.Run()
- g.atPhase(run.typerPhase.next)(g.proxyReport.generate(dir))
+ g.afterTyper(g.proxyReport.generate(dir))
}
}
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
index f7c27dceb5..d1cdd30dd8 100644
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ b/src/compiler/scala/tools/nsc/util/Statistics.scala
@@ -20,7 +20,7 @@ class Statistics extends scala.reflect.internal.util.Statistics {
val typedSelectCount = new Counter
val typerNanos = new Timer
val classReadNanos = new Timer
-
+
val failedApplyNanos = new Timer
val failedOpEqNanos = new Timer
val failedSilentNanos = new Timer
diff --git a/src/compiler/scala/tools/nsc/util/WeakHashSet.scala b/src/compiler/scala/tools/nsc/util/WeakHashSet.scala
new file mode 100644
index 0000000000..6a10422b00
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/WeakHashSet.scala
@@ -0,0 +1,60 @@
+package scala.tools.nsc.util
+
+import scala.collection.mutable
+import scala.collection.mutable.ArrayBuffer
+import scala.collection.mutable.Builder
+import scala.collection.mutable.SetBuilder
+import scala.runtime.AbstractFunction1
+
+/** A bare-bones implementation of a mutable `Set` that uses weak references
+ * to hold the elements.
+ *
+ * This implementation offers only add/remove/test operations,
+ * therefore it does not fulfill the contract of Scala collection sets.
+ */
+class WeakHashSet[T <: AnyRef] extends AbstractFunction1[T, Boolean] {
+ private val underlying = mutable.HashSet[WeakReferenceWithEquals[T]]()
+
+ /** Add the given element to this set. */
+ def +=(elem: T): this.type = {
+ underlying += new WeakReferenceWithEquals(elem)
+ this
+ }
+
+ /** Remove the given element from this set. */
+ def -=(elem: T): this.type = {
+ underlying -= new WeakReferenceWithEquals(elem)
+ this
+ }
+
+ /** Does the given element belong to this set? */
+ def contains(elem: T): Boolean =
+ underlying.contains(new WeakReferenceWithEquals(elem))
+
+ /** Does the given element belong to this set? */
+ def apply(elem: T): Boolean = contains(elem)
+
+ /** Return the number of elements in this set, including reclaimed elements. */
+ def size = underlying.size
+
+ /** Remove all elements in this set. */
+ def clear() = underlying.clear()
+}
+
+/** A WeakReference implementation that implements equals and hashCode by
+ * delegating to the referent.
+ */
+class WeakReferenceWithEquals[T <: AnyRef](ref: T) {
+ def get(): T = underlying.get()
+
+ override val hashCode = ref.hashCode
+
+ override def equals(other: Any): Boolean = other match {
+ case wf: WeakReferenceWithEquals[_] =>
+ underlying.get() == wf.get()
+ case _ =>
+ false
+ }
+
+ private val underlying = new java.lang.ref.WeakReference(ref)
+}
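
A minimal usage sketch of the new class, exercising only the API defined above.

    object WeakHashSetDemo extends App {
      val set = new scala.tools.nsc.util.WeakHashSet[String]
      val key = new String("sym")   // a fresh object, not the interned literal
      set += key
      assert(set(key))              // apply delegates to contains
      set -= key
      assert(!set.contains(key))
    }
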
diff --git a/src/compiler/scala/tools/util/EditDistance.scala b/src/compiler/scala/tools/util/EditDistance.scala
index 5f152ecabb..0af34020a8 100644
--- a/src/compiler/scala/tools/util/EditDistance.scala
+++ b/src/compiler/scala/tools/util/EditDistance.scala
@@ -8,7 +8,7 @@ package util
object EditDistance {
import java.lang.Character.{ toLowerCase => lower }
-
+
def similarString(name: String, allowed: TraversableOnce[String]): String = {
val suggested = suggestions(name, allowed.toSeq, maxDistance = 1, maxSuggestions = 2)
if (suggested.isEmpty) ""
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index ab1510bd7f..af0d768607 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -50,7 +50,27 @@ abstract class CPSAnnotationChecker extends CPSUtils {
// @plus @cps will fall through and compare the @cps type args
// @cps parameters must match exactly
- (annots1 corresponds annots2)(_.atp <:< _.atp)
+ if ((annots1 corresponds annots2)(_.atp <:< _.atp))
+ return true
+
+ // Need to handle uninstantiated type vars specially:
+
+ // g map (x => x) with expected type List[Int] @cps
+ // results in comparison ?That <:< List[Int] @cps
+
+ // Instantiating ?That to an annotated type would fail during
+ // transformation.
+
+ // Instead we force-compare tpe1 <:< tpe2.withoutAnnotations
+ // to trigger instantiation of the TypeVar to the base type
+
+ // This is a bit unorthodox (we're only supposed to look at
+ // annotations here) but seems to work.
+
+ if (!annots2.isEmpty && !tpe1.isGround)
+ return tpe1 <:< tpe2.withoutAnnotations
+
+ false
}
/** Refine the computed least upper bound of a list of types.
@@ -222,6 +242,9 @@ abstract class CPSAnnotationChecker extends CPSUtils {
case OverloadedType(pre, alts) =>
OverloadedType(pre, alts.map((sym: Symbol) => updateAttributes(pre.memberType(sym), annots)))
*/
+ case OverloadedType(pre, alts) => tpe //reconstruct correct annotations later
+ case MethodType(params, restpe) => tpe
+ case PolyType(params, restpe) => tpe
case _ =>
assert(childAnnots forall (_ matches MarkerCPSTypes), childAnnots)
/*
@@ -229,7 +252,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
plus + [] = plus
cps + [] = cps
plus cps + [] = plus cps
- minus cps + [] = minus cp
+ minus cps + [] = minus cps
synth cps + [] = synth cps // <- synth on left - does it happen?
[] + cps = cps
@@ -313,18 +336,34 @@ abstract class CPSAnnotationChecker extends CPSUtils {
def single(xs: List[AnnotationInfo]) = xs match {
case List(x) => x
case _ =>
- global.globalError("not a single cps annotation: " + xs)// FIXME: error message
+ global.globalError("not a single cps annotation: " + xs)
xs(0)
}
+
+ def emptyOrSingleList(xs: List[AnnotationInfo]) = if (xs.isEmpty) Nil else List(single(xs))
def transChildrenInOrder(tree: Tree, tpe: Type, childTrees: List[Tree], byName: List[Tree]) = {
- val children = childTrees.flatMap { t =>
+ def inspect(t: Tree): List[AnnotationInfo] = {
if (t.tpe eq null) Nil else {
+ val extra: List[AnnotationInfo] = t.tpe match {
+ case _: MethodType | _: PolyType | _: OverloadedType =>
+          // method types, poly types and overloaded types do not obtain cps annotations by propagation
+ // need to reconstruct transitively from their children.
+ t match {
+ case Select(qual, name) => inspect(qual)
+ case Apply(fun, args) => (fun::(transArgList(fun,args).flatten)) flatMap inspect
+ case TypeApply(fun, args) => (fun::(transArgList(fun,args).flatten)) flatMap inspect
+ case _ => Nil
+ }
+ case _ => Nil
+ }
+
val types = cpsParamAnnotation(t.tpe)
// TODO: check that it has been adapted and if so correctly
- if (types.isEmpty) Nil else List(single(types))
+ extra ++ emptyOrSingleList(types)
}
}
+ val children = childTrees flatMap inspect
val newtpe = updateAttributesFromChildren(tpe, children, byName)
@@ -359,9 +398,15 @@ abstract class CPSAnnotationChecker extends CPSUtils {
transChildrenInOrder(tree, tpe, qual::(transArgList(fun, args).flatten), Nil)
+ case Apply(TypeApply(fun @ Select(qual, name), targs), args) if fun.isTyped => // not trigge
+
+ vprintln("[checker] checking select apply type-apply " + tree + "/" + tpe)
+
+ transChildrenInOrder(tree, tpe, qual::(transArgList(fun, args).flatten), Nil)
+
case TypeApply(fun @ Select(qual, name), args) if fun.isTyped =>
def stripNullaryMethodType(tp: Type) = tp match { case NullaryMethodType(restpe) => restpe case tp => tp }
- vprintln("[checker] checking select apply " + tree + "/" + tpe)
+ vprintln("[checker] checking select type-apply " + tree + "/" + tpe)
transChildrenInOrder(tree, stripNullaryMethodType(tpe), List(qual, fun), Nil)
@@ -373,7 +418,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
case TypeApply(fun, args) =>
- vprintln("[checker] checking type apply " + tree + "/" + tpe)
+ vprintln("[checker] checking unknown type apply " + tree + "/" + tpe)
transChildrenInOrder(tree, tpe, List(fun), Nil)
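
(For context on the two hunks above: the subtype fix matters where type inference leaves an uninstantiated type variable, such as the ?That of CanBuildFrom, in a position whose expected type carries a cps annotation, and the inspect change reconstructs annotations for method, poly and overloaded types from their children. A minimal sketch of the kind of surface-level continuations code this checker must typecheck; hypothetical demo object, not part of the patch, assuming the plugin is enabled with -P:continuations:enable:

    import scala.util.continuations._

    object CheckerDemo {
      // shift[A, B, C]: here A = Int, B = String, C = String, so the
      // checker sees the answer-type annotation @cpsParam[String, String]
      def ask: Int @cpsParam[String, String] =
        shift { (k: Int => String) => "got " + k(42) }

      def main(args: Array[String]): Unit =
        println(reset { (ask + 1).toString })  // prints "got 43"
    }
)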
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 8bbda5dd05..075009ce5e 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -12,7 +12,7 @@ trait CPSUtils {
var cpsEnabled = true
val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
def vprintln(x: =>Any): Unit = if (verbose) println(x)
-
+
object cpsNames {
val catches = newTermName("$catches")
val ex = newTermName("$ex")
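
(The vprintln helper kept in this hunk takes its message by name, so the string is never built unless -DcpsVerbose=true is set on the compiler JVM. A standalone sketch of the same idiom, using a hypothetical demo object:

    object VerboseDemo {
      val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
      def vprintln(x: => Any): Unit = if (verbose) println(x)

      def main(args: Array[String]): Unit = {
        vprintln("sum = " + (1 to 1000000).sum)  // argument evaluated only when verbose
        println("done")
      }
    }
)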
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index cea558d2d3..1189cc2e38 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -47,20 +47,20 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// ValDef case here.
case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- log("transforming " + dd.symbol)
+ debuglog("transforming " + dd.symbol)
atOwner(dd.symbol) {
val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))
- log("result "+rhs1)
- log("result is of type "+rhs1.tpe)
+ debuglog("result "+rhs1)
+ debuglog("result is of type "+rhs1.tpe)
treeCopy.DefDef(dd, mods, name, transformTypeDefs(tparams), transformValDefss(vparamss),
transform(tpt), rhs1)
}
case ff @ Function(vparams, body) =>
- log("transforming anon function " + ff.symbol)
+ debuglog("transforming anon function " + ff.symbol)
atOwner(ff.symbol) {
@@ -88,22 +88,26 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
transExpr(body, None, ext)
}
- log("result "+body1)
- log("result is of type "+body1.tpe)
+ debuglog("result "+body1)
+ debuglog("result is of type "+body1.tpe)
treeCopy.Function(ff, transformValDefs(vparams), body1)
}
case vd @ ValDef(mods, name, tpt, rhs) => // object-level valdefs
- log("transforming valdef " + vd.symbol)
+ debuglog("transforming valdef " + vd.symbol)
- atOwner(vd.symbol) {
+ if (getExternalAnswerTypeAnn(tpt.tpe).isEmpty) {
+
+ atOwner(vd.symbol) {
- assert(getExternalAnswerTypeAnn(tpt.tpe) == None)
+ val rhs1 = transExpr(rhs, None, None)
- val rhs1 = transExpr(rhs, None, None)
-
- treeCopy.ValDef(vd, mods, name, transform(tpt), rhs1)
+ treeCopy.ValDef(vd, mods, name, transform(tpt), rhs1)
+ }
+ } else {
+ unit.error(tree.pos, "cps annotations not allowed on by-value parameters or value definitions")
+ super.transform(tree)
}
case TypeTree() =>
@@ -298,8 +302,8 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
if (!expr.isEmpty && (expr.tpe.typeSymbol ne NothingClass)) {
// must convert!
- log("cps type conversion (has: " + cpsA + "/" + spc + "/" + expr.tpe + ")")
- log("cps type conversion (expected: " + cpsR.get + "): " + expr)
+ debuglog("cps type conversion (has: " + cpsA + "/" + spc + "/" + expr.tpe + ")")
+ debuglog("cps type conversion (expected: " + cpsR.get + "): " + expr)
if (!hasPlusMarker(expr.tpe))
unit.warning(tree.pos, "expression " + tree + " is cps-transformed unexpectedly")
@@ -322,10 +326,10 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
} else if (!cpsR.isDefined && bot.isDefined) {
// error!
- log("cps type error: " + expr)
+ debuglog("cps type error: " + expr)
//println("cps type error: " + expr + "/" + expr.tpe + "/" + getAnswerTypeAnn(expr.tpe))
- println(cpsR + "/" + spc + "/" + bot)
+ //println(cpsR + "/" + spc + "/" + bot)
unit.error(tree.pos, "found cps expression in non-cps position")
} else {
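
(The ValDef hunk above replaces an assertion with a proper diagnostic. A sketch of the kind of source that now gets the new error instead of crashing the compiler; hypothetical example, intentionally rejected under the plugin:

    import scala.util.continuations._

    object ValDefDemo {
      // rejected with: "cps annotations not allowed on by-value
      // parameters or value definitions"
      val bad: Int @cpsParam[Int, Int] =
        shift { (k: Int => Int) => k(1) }
    }
)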
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index b2a1546b4e..6453671eac 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -39,10 +39,10 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val newtp = transformCPSType(tp)
if (newtp != tp)
- log("transformInfo changed type for " + sym + " to " + newtp);
+ debuglog("transformInfo changed type for " + sym + " to " + newtp);
if (sym == MethReifyR)
- log("transformInfo (not)changed type for " + sym + " to " + newtp);
+ debuglog("transformInfo (not)changed type for " + sym + " to " + newtp);
newtp
}
@@ -83,13 +83,13 @@ abstract class SelectiveCPSTransform extends PluginComponent with
case Apply(TypeApply(fun, targs), args)
if (fun.symbol == MethShift) =>
- log("found shift: " + tree)
+ debuglog("found shift: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethShiftR) // TODO: correct?
//gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedIdent(ScalaPackage),
//ScalaPackage.tpe.member("util")), ScalaPackage.tpe.member("util").tpe.member("continuations")), MethShiftR)
//gen.mkAttributedRef(ModCPS.tpe, MethShiftR) // TODO: correct?
- log(funR.tpe)
+ debuglog("funR.tpe = " + funR.tpe)
Apply(
TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
args.map(transform(_))
@@ -98,10 +98,10 @@ abstract class SelectiveCPSTransform extends PluginComponent with
case Apply(TypeApply(fun, targs), args)
if (fun.symbol == MethShiftUnit) =>
- log("found shiftUnit: " + tree)
+ debuglog("found shiftUnit: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethShiftUnitR) // TODO: correct?
- log(funR.tpe)
+ debuglog("funR.tpe = " + funR.tpe)
Apply(
TypeApply(funR, List(targs(0), targs(1))).setType(appliedType(funR.tpe,
List(targs(0).tpe, targs(1).tpe))),
@@ -114,7 +114,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
log("found reify: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethReifyR) // TODO: correct?
- log(funR.tpe)
+ debuglog("funR.tpe = " + funR.tpe)
Apply(
TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
args.map(transform(_))
@@ -203,7 +203,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
rhs.changeOwner(currentOwner -> fun.symbol)
val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
- val catch2 = { localTyper.typedCases(tree, List(
+ val catch2 = { localTyper.typedCases(List(
CaseDef(Bind(exSym, Typed(Ident("_"), TypeTree(ThrowableClass.tpe))),
Apply(Select(Ident(funSym), nme.isDefinedAt), List(Ident(exSym))),
Apply(Ident(funSym), List(Ident(exSym))))
@@ -258,17 +258,17 @@ abstract class SelectiveCPSTransform extends PluginComponent with
case vd @ ValDef(mods, name, tpt, rhs)
if (vd.symbol.hasAnnotation(MarkerCPSSym)) =>
- log("found marked ValDef "+name+" of type " + vd.symbol.tpe)
+ debuglog("found marked ValDef "+name+" of type " + vd.symbol.tpe)
val tpe = vd.symbol.tpe
val rhs1 = atOwner(vd.symbol) { transform(rhs) }
rhs1.changeOwner(vd.symbol -> currentOwner) // TODO: don't traverse twice
- log("valdef symbol " + vd.symbol + " has type " + tpe)
- log("right hand side " + rhs1 + " has type " + rhs1.tpe)
+ debuglog("valdef symbol " + vd.symbol + " has type " + tpe)
+ debuglog("right hand side " + rhs1 + " has type " + rhs1.tpe)
- log("currentOwner: " + currentOwner)
- log("currentMethod: " + currentMethod)
+ debuglog("currentOwner: " + currentOwner)
+ debuglog("currentMethod: " + currentMethod)
val (bodyStms, bodyExpr) = transBlock(rest, expr)
// FIXME: result will later be traversed again by TreeSymSubstituter and
@@ -308,12 +308,12 @@ abstract class SelectiveCPSTransform extends PluginComponent with
// see note about multiple traversals above
- log("fun.symbol: "+fun.symbol)
- log("fun.symbol.owner: "+fun.symbol.owner)
- log("arg.owner: "+arg.owner)
+ debuglog("fun.symbol: "+fun.symbol)
+ debuglog("fun.symbol.owner: "+fun.symbol.owner)
+ debuglog("arg.owner: "+arg.owner)
- log("fun.tpe:"+fun.tpe)
- log("return type of fun:"+body1.tpe)
+ debuglog("fun.tpe:"+fun.tpe)
+ debuglog("return type of fun:"+body1.tpe)
var methodName = nme.map
@@ -324,7 +324,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
else
unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
- log("will use method:"+methodName)
+ debuglog("will use method:"+methodName)
localTyper.typed(atPos(vd.symbol.pos) {
Apply(Select(ctxR, ctxR.tpe.member(methodName)), List(fun))
@@ -335,7 +335,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
try {
if (specialCaseTrivial) {
- log("will optimize possible tail call: " + bodyExpr)
+ debuglog("will optimize possible tail call: " + bodyExpr)
// FIXME: flatMap impl has become more complicated due to
// exceptions. do we need to put a try/catch in the then part??
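
(This transform rewrites the user-facing shift, shiftUnit and reify calls into their runtime counterparts (MethShiftR and friends, as the hunks above show). A small runnable sketch of the surface API involved; hypothetical demo object, assuming -P:continuations:enable:

    import scala.util.continuations._

    object ShiftDemo {
      def main(args: Array[String]): Unit = {
        val n = reset {
          // k is the delimited continuation "1 + _", applied twice below
          1 + shift { (k: Int => Int) => k(k(10)) }
        }
        println(n)  // 12
      }
    }
)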
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
index 3fad92cbf1..e9389e9acb 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
@@ -1,669 +1,2324 @@
/*
+
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
-import java.util.*;
-import java.util.concurrent.*;
-import java.util.concurrent.locks.*;
-import java.util.concurrent.atomic.*;
-import sun.misc.Unsafe;
-import java.lang.reflect.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Random;
+//import java.util.concurrent.AbstractExecutorService;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+//import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.AbstractQueuedSynchronizer;
+import java.util.concurrent.locks.Condition;
+
+interface RunnableFuture<T> extends Runnable {
+ //TR placeholder for java.util.concurrent.RunnableFuture
+}
/**
- * An {@link ExecutorService} for running {@link ForkJoinTask}s. A
- * ForkJoinPool provides the entry point for submissions from
- * non-ForkJoinTasks, as well as management and monitoring operations.
- * Normally a single ForkJoinPool is used for a large number of
- * submitted tasks. Otherwise, use would not usually outweigh the
- * construction and bookkeeping overhead of creating a large set of
- * threads.
+ * An {@link ExecutorService} for running {@link ForkJoinTask}s.
+ * A {@code ForkJoinPool} provides the entry point for submissions
+ * from non-{@code ForkJoinTask} clients, as well as management and
+ * monitoring operations.
*
- * <p>ForkJoinPools differ from other kinds of Executors mainly in
- * that they provide <em>work-stealing</em>: all threads in the pool
- * attempt to find and execute subtasks created by other active tasks
- * (eventually blocking if none exist). This makes them efficient when
- * most tasks spawn other subtasks (as do most ForkJoinTasks), as well
- * as the mixed execution of some plain Runnable- or Callable- based
- * activities along with ForkJoinTasks. When setting
- * <tt>setAsyncMode</tt>, a ForkJoinPools may also be appropriate for
- * use with fine-grained tasks that are never joined. Otherwise, other
- * ExecutorService implementations are typically more appropriate
- * choices.
+ * <p>A {@code ForkJoinPool} differs from other kinds of {@link
+ * ExecutorService} mainly by virtue of employing
+ * <em>work-stealing</em>: all threads in the pool attempt to find and
+ * execute tasks submitted to the pool and/or created by other active
+ * tasks (eventually blocking waiting for work if none exist). This
+ * enables efficient processing when most tasks spawn other subtasks
+ * (as do most {@code ForkJoinTask}s), as well as when many small
+ * tasks are submitted to the pool from external clients. Especially
+ * when setting <em>asyncMode</em> to true in constructors, {@code
+ * ForkJoinPool}s may also be appropriate for use with event-style
+ * tasks that are never joined.
*
- * <p>A ForkJoinPool may be constructed with a given parallelism level
- * (target pool size), which it attempts to maintain by dynamically
- * adding, suspending, or resuming threads, even if some tasks are
- * waiting to join others. However, no such adjustments are performed
- * in the face of blocked IO or other unmanaged synchronization. The
- * nested <code>ManagedBlocker</code> interface enables extension of
- * the kinds of synchronization accommodated. The target parallelism
- * level may also be changed dynamically (<code>setParallelism</code>)
- * and thread construction can be limited using methods
- * <code>setMaximumPoolSize</code> and/or
- * <code>setMaintainsParallelism</code>.
+ * <p>A {@code ForkJoinPool} is constructed with a given target
+ * parallelism level; by default, equal to the number of available
+ * processors. The pool attempts to maintain enough active (or
+ * available) threads by dynamically adding, suspending, or resuming
+ * internal worker threads, even if some tasks are stalled waiting to
+ * join others. However, no such adjustments are guaranteed in the
+ * face of blocked IO or other unmanaged synchronization. The nested
+ * {@link ManagedBlocker} interface enables extension of the kinds of
+ * synchronization accommodated.
*
* <p>In addition to execution and lifecycle control methods, this
* class provides status check methods (for example
- * <code>getStealCount</code>) that are intended to aid in developing,
+ * {@link #getStealCount}) that are intended to aid in developing,
* tuning, and monitoring fork/join applications. Also, method
- * <code>toString</code> returns indications of pool state in a
+ * {@link #toString} returns indications of pool state in a
* convenient form for informal monitoring.
*
+ * <p> As is the case with other ExecutorServices, there are three
+ * main task execution methods summarized in the following table.
+ * These are designed to be used primarily by clients not already
+ * engaged in fork/join computations in the current pool. The main
+ * forms of these methods accept instances of {@code ForkJoinTask},
+ * but overloaded forms also allow mixed execution of plain {@code
+ * Runnable}- or {@code Callable}- based activities as well. However,
+ * tasks that are already executing in a pool should normally instead
+ * use the within-computation forms listed in the table unless using
+ * async event-style tasks that are not usually joined, in which case
+ * there is little difference among choice of methods.
+ *
+ * <table BORDER CELLPADDING=3 CELLSPACING=1>
+ * <tr>
+ * <td></td>
+ * <td ALIGN=CENTER> <b>Call from non-fork/join clients</b></td>
+ * <td ALIGN=CENTER> <b>Call from within fork/join computations</b></td>
+ * </tr>
+ * <tr>
+ * <td> <b>Arrange async execution</td>
+ * <td> {@link #execute(ForkJoinTask)}</td>
+ * <td> {@link ForkJoinTask#fork}</td>
+ * </tr>
+ * <tr>
+ * <td> <b>Await and obtain result</td>
+ * <td> {@link #invoke(ForkJoinTask)}</td>
+ * <td> {@link ForkJoinTask#invoke}</td>
+ * </tr>
+ * <tr>
+ * <td> <b>Arrange exec and obtain Future</td>
+ * <td> {@link #submit(ForkJoinTask)}</td>
+ * <td> {@link ForkJoinTask#fork} (ForkJoinTasks <em>are</em> Futures)</td>
+ * </tr>
+ * </table>
+ *
+ * <p><b>Sample Usage.</b> Normally a single {@code ForkJoinPool} is
+ * used for all parallel task execution in a program or subsystem.
+ * Otherwise, use would not usually outweigh the construction and
+ * bookkeeping overhead of creating a large set of threads. For
+ * example, a common pool could be used for the {@code SortTasks}
+ * illustrated in {@link RecursiveAction}. Because {@code
+ * ForkJoinPool} uses threads in {@linkplain java.lang.Thread#isDaemon
+ * daemon} mode, there is typically no need to explicitly {@link
+ * #shutdown} such a pool upon program exit.
+ *
+ * <pre> {@code
+ * static final ForkJoinPool mainPool = new ForkJoinPool();
+ * ...
+ * public void sort(long[] array) {
+ * mainPool.invoke(new SortTask(array, 0, array.length));
+ * }}</pre>
+ *
* <p><b>Implementation notes</b>: This implementation restricts the
* maximum number of running threads to 32767. Attempts to create
- * pools with greater than the maximum result in
- * IllegalArgumentExceptions.
+ * pools with greater than the maximum number result in
+ * {@code IllegalArgumentException}.
+ *
+ * <p>This implementation rejects submitted tasks (that is, by throwing
+ * {@link RejectedExecutionException}) only when the pool is shut down
+ * or internal resources have been exhausted.
+ *
+ * @since 1.7
+ * @author Doug Lea
*/
public class ForkJoinPool /*extends AbstractExecutorService*/ {
/*
- * See the extended comments interspersed below for design,
- * rationale, and walkthroughs.
+ * Implementation Overview
+ *
+ * This class and its nested classes provide the main
+ * functionality and control for a set of worker threads:
+ * Submissions from non-FJ threads enter into submission queues.
+ * Workers take these tasks and typically split them into subtasks
+ * that may be stolen by other workers. Preference rules give
+ * first priority to processing tasks from their own queues (LIFO
+ * or FIFO, depending on mode), then to randomized FIFO steals of
+ * tasks in other queues.
+ *
+ * WorkQueues
+ * ==========
+ *
+ * Most operations occur within work-stealing queues (in nested
+ * class WorkQueue). These are special forms of Deques that
+ * support only three of the four possible end-operations -- push,
+ * pop, and poll (aka steal), under the further constraints that
+ * push and pop are called only from the owning thread (or, as
+ * extended here, under a lock), while poll may be called from
+ * other threads. (If you are unfamiliar with them, you probably
+ * want to read Herlihy and Shavit's book "The Art of
+ * Multiprocessor programming", chapter 16 describing these in
+ * more detail before proceeding.) The main work-stealing queue
+ * design is roughly similar to those in the papers "Dynamic
+ * Circular Work-Stealing Deque" by Chase and Lev, SPAA 2005
+ * (http://research.sun.com/scalable/pubs/index.html) and
+ * "Idempotent work stealing" by Michael, Saraswat, and Vechev,
+ * PPoPP 2009 (http://portal.acm.org/citation.cfm?id=1504186).
+ * The main differences ultimately stem from GC requirements that
+ * we null out taken slots as soon as we can, to maintain as small
+ * a footprint as possible even in programs generating huge
+ * numbers of tasks. To accomplish this, we shift the CAS
+ * arbitrating pop vs poll (steal) from being on the indices
+ * ("base" and "top") to the slots themselves. So, both a
+ * successful pop and poll mainly entail a CAS of a slot from
+ * non-null to null. Because we rely on CASes of references, we
+ * do not need tag bits on base or top. They are simple ints as
+ * used in any circular array-based queue (see for example
+ * ArrayDeque). Updates to the indices must still be ordered in a
+ * way that guarantees that top == base means the queue is empty,
+ * but otherwise may err on the side of possibly making the queue
+ * appear nonempty when a push, pop, or poll have not fully
+ * committed. Note that this means that the poll operation,
+ * considered individually, is not wait-free. One thief cannot
+ * successfully continue until another in-progress one (or, if
+ * previously empty, a push) completes. However, in the
+ * aggregate, we ensure at least probabilistic non-blockingness.
+ * If an attempted steal fails, a thief always chooses a different
+ * random victim target to try next. So, in order for one thief to
+ * progress, it suffices for any in-progress poll or new push on
+ * any empty queue to complete. (This is why we normally use
+ * method pollAt and its variants that try once at the apparent
+ * base index, else consider alternative actions, rather than
+ * method poll.)
+ *
+ * This approach also enables support of a user mode in which local
+ * task processing is in FIFO, not LIFO order, simply by using
+ * poll rather than pop. This can be useful in message-passing
+ * frameworks in which tasks are never joined. However, neither
+ * mode considers affinities, loads, cache localities, etc., so
+ * rarely provides the best possible performance on a given
+ * machine, but portably provides good throughput by averaging over
+ * these factors. (Further, even if we did try to use such
+ * information, we do not usually have a basis for exploiting it.
+ * For example, some sets of tasks profit from cache affinities,
+ * but others are harmed by cache pollution effects.)
+ *
+ * WorkQueues are also used in a similar way for tasks submitted
+ * to the pool. We cannot mix these tasks in the same queues used
+ * for work-stealing (this would contaminate lifo/fifo
+ * processing). Instead, we loosely associate submission queues
+ * with submitting threads, using a form of hashing. The
+ * ThreadLocal Submitter class contains a value initially used as
+ * a hash code for choosing existing queues, but may be randomly
+ * repositioned upon contention with other submitters. In
+ * essence, submitters act like workers except that they never
+ * take tasks, and they are multiplexed on to a finite number of
+ * shared work queues. However, classes are set up so that future
+ * extensions could allow submitters to optionally help perform
+ * tasks as well. Insertion of tasks in shared mode requires a
+ * lock (mainly to protect in the case of resizing) but we use
+ * only a simple spinlock (using bits in field runState), because
+ * submitters encountering a busy queue move on to try or create
+ * other queues -- they block only when creating and registering
+ * new queues.
+ *
+ * Management
+ * ==========
+ *
+ * The main throughput advantages of work-stealing stem from
+ * decentralized control -- workers mostly take tasks from
+ * themselves or each other. We cannot negate this in the
+ * implementation of other management responsibilities. The main
+ * tactic for avoiding bottlenecks is packing nearly all
+ * essentially atomic control state into two volatile variables
+ * that are by far most often read (not written) as status and
+ * consistency checks.
+ *
+ * Field "ctl" contains 64 bits holding all the information needed
+ * to atomically decide to add, inactivate, enqueue (on an event
+ * queue), dequeue, and/or re-activate workers. To enable this
+ * packing, we restrict maximum parallelism to (1<<15)-1 (which is
+ * far in excess of normal operating range) to allow ids, counts,
+ * and their negations (used for thresholding) to fit into 16bit
+ * fields.
+ *
+ * Field "runState" contains 32 bits needed to register and
+ * deregister WorkQueues, as well as to enable shutdown. It is
+ * only modified under a lock (normally briefly held, but
+ * occasionally protecting allocations and resizings) but even
+ * when locked remains available to check consistency.
+ *
+ * Recording WorkQueues. WorkQueues are recorded in the
+ * "workQueues" array that is created upon pool construction and
+ * expanded if necessary. Updates to the array while recording
+ * new workers and unrecording terminated ones are protected from
+ * each other by a lock but the array is otherwise concurrently
+ * readable, and accessed directly. To simplify index-based
+ * operations, the array size is always a power of two, and all
+ * readers must tolerate null slots. Shared (submission) queues
+ * are at even indices, worker queues at odd indices. Grouping
+ * them together in this way simplifies and speeds up task
+ * scanning.
+ *
+ * All worker thread creation is on-demand, triggered by task
+ * submissions, replacement of terminated workers, and/or
+ * compensation for blocked workers. However, all other support
+ * code is set up to work with other policies. To ensure that we
+ * do not hold on to worker references that would prevent GC, ALL
+ * accesses to workQueues are via indices into the workQueues
+ * array (which is one source of some of the messy code
+ * constructions here). In essence, the workQueues array serves as
+ * a weak reference mechanism. Thus for example the wait queue
+ * field of ctl stores indices, not references. Access to the
+ * workQueues in associated methods (for example signalWork) must
+ * both index-check and null-check the IDs. All such accesses
+ * ignore bad IDs by returning out early from what they are doing,
+ * since this can only be associated with termination, in which
+ * case it is OK to give up. All uses of the workQueues array
+ * also check that it is non-null (even if previously
+ * non-null). This allows nulling during termination, which is
+ * currently not necessary, but remains an option for
+ * resource-revocation-based shutdown schemes. It also helps
+ * reduce JIT issuance of uncommon-trap code, which tends to
+ * unnecessarily complicate control flow in some methods.
+ *
+ * Event Queuing. Unlike HPC work-stealing frameworks, we cannot
+ * let workers spin indefinitely scanning for tasks when none can
+ * be found immediately, and we cannot start/resume workers unless
+ * there appear to be tasks available. On the other hand, we must
+ * quickly prod them into action when new tasks are submitted or
+ * generated. In many usages, ramp-up time to activate workers is
+ * the main limiting factor in overall performance (this is
+ * compounded at program start-up by JIT compilation and
+ * allocation). So we try to streamline this as much as possible.
+ * We park/unpark workers after placing in an event wait queue
+ * when they cannot find work. This "queue" is actually a simple
+ * Treiber stack, headed by the "id" field of ctl, plus a 15bit
+ * counter value (that reflects the number of times a worker has
+ * been inactivated) to avoid ABA effects (we need only as many
+ * version numbers as worker threads). Successors are held in
+ * field WorkQueue.nextWait. Queuing deals with several intrinsic
+ * races, mainly that a task-producing thread can miss seeing (and
+ * signalling) another thread that gave up looking for work but
+ * has not yet entered the wait queue. We solve this by requiring
+ * a full sweep of all workers (via repeated calls to method
+ * scan()) both before and after a newly waiting worker is added
+ * to the wait queue. During a rescan, the worker might release
+ * some other queued worker rather than itself, which has the same
+ * net effect. Because enqueued workers may actually be rescanning
+ * rather than waiting, we set and clear the "parker" field of
+ * WorkQueues to reduce unnecessary calls to unpark. (This
+ * requires a secondary recheck to avoid missed signals.) Note
+ * the unusual conventions about Thread.interrupts surrounding
+ * parking and other blocking: Because interrupts are used solely
+ * to alert threads to check termination, which is checked anyway
+ * upon blocking, we clear status (using Thread.interrupted)
+ * before any call to park, so that park does not immediately
+ * return due to status being set via some other unrelated call to
+ * interrupt in user code.
+ *
+ * Signalling. We create or wake up workers only when there
+ * appears to be at least one task they might be able to find and
+ * execute. When a submission is added or another worker adds a
+ * task to a queue that previously had fewer than two tasks, they
+ * signal waiting workers (or trigger creation of new ones if
+ * fewer than the given parallelism level -- see signalWork).
+ * These primary signals are buttressed by signals during rescans;
+ * together these cover the signals needed in cases when more
+ * tasks are pushed but untaken, and improve performance compared
+ * to having one thread wake up all workers.
+ *
+ * Trimming workers. To release resources after periods of lack of
+ * use, a worker starting to wait when the pool is quiescent will
+ * time out and terminate if the pool has remained quiescent for
+ * SHRINK_RATE nanosecs. This will slowly propagate, eventually
+ * terminating all workers after long periods of non-use.
+ *
+ * Shutdown and Termination. A call to shutdownNow atomically sets
+ * a runState bit and then (non-atomically) sets each worker's
+ * runState status, cancels all unprocessed tasks, and wakes up
+ * all waiting workers. Detecting whether termination should
+ * commence after a non-abrupt shutdown() call requires more work
+ * and bookkeeping. We need consensus about quiescence (i.e., that
+ * there is no more work). The active count provides a primary
+ * indication but non-abrupt shutdown still requires a rechecking
+ * scan for any workers that are inactive but not queued.
+ *
+ * Joining Tasks
+ * =============
+ *
+ * Any of several actions may be taken when one worker is waiting
+ * to join a task stolen (or always held) by another. Because we
+ * are multiplexing many tasks on to a pool of workers, we can't
+ * just let them block (as in Thread.join). We also cannot just
+ * reassign the joiner's run-time stack with another and replace
+ * it later, which would be a form of "continuation", that even if
+ * possible is not necessarily a good idea since we sometimes need
+ * both an unblocked task and its continuation to progress.
+ * Instead we combine two tactics:
+ *
+ * Helping: Arranging for the joiner to execute some task that it
+ * would be running if the steal had not occurred.
+ *
+ * Compensating: Unless there are already enough live threads,
+ * method tryCompensate() may create or re-activate a spare
+ * thread to compensate for blocked joiners until they unblock.
+ *
+ * A third form (implemented in tryRemoveAndExec and
+ * tryPollForAndExec) amounts to helping a hypothetical
+ * compensator: If we can readily tell that a possible action of a
+ * compensator is to steal and execute the task being joined, the
+ * joining thread can do so directly, without the need for a
+ * compensation thread (although at the expense of larger run-time
+ * stacks, but the tradeoff is typically worthwhile).
+ *
+ * The ManagedBlocker extension API can't use helping so relies
+ * only on compensation in method awaitBlocker.
+ *
+ * The algorithm in tryHelpStealer entails a form of "linear"
+ * helping: Each worker records (in field currentSteal) the most
+ * recent task it stole from some other worker. Plus, it records
+ * (in field currentJoin) the task it is currently actively
+ * joining. Method tryHelpStealer uses these markers to try to
+ * find a worker to help (i.e., steal back a task from and execute
+ * it) that could hasten completion of the actively joined task.
+ * In essence, the joiner executes a task that would be on its own
+ * local deque had the to-be-joined task not been stolen. This may
+ * be seen as a conservative variant of the approach in Wagner &
+ * Calder "Leapfrogging: a portable technique for implementing
+ * efficient futures" SIGPLAN Notices, 1993
+ * (http://portal.acm.org/citation.cfm?id=155354). It differs in
+ * that: (1) We only maintain dependency links across workers upon
+ * steals, rather than use per-task bookkeeping. This sometimes
+ * requires a linear scan of workQueues array to locate stealers,
+ * but often doesn't because stealers leave hints (that may become
+ * stale/wrong) of where to locate them. A stealHint is only a
+ * hint because a worker might have had multiple steals and the
+ * hint records only one of them (usually the most current).
+ * Hinting isolates cost to when it is needed, rather than adding
+ * to per-task overhead. (2) It is "shallow", ignoring nesting
+ * and potentially cyclic mutual steals. (3) It is intentionally
+ * racy: field currentJoin is updated only while actively joining,
+ * which means that we miss links in the chain during long-lived
+ * tasks, GC stalls etc (which is OK since blocking in such cases
+ * is usually a good idea). (4) We bound the number of attempts
+ * to find work (see MAX_HELP) and fall back to suspending the
+ * worker and if necessary replacing it with another.
+ *
+ * It is impossible to keep exactly the target parallelism number
+ * of threads running at any given time. Determining the
+ * existence of conservatively safe helping targets, the
+ * availability of already-created spares, and the apparent need
+ * to create new spares are all racy, so we rely on multiple
+ * retries of each. Compensation in the apparent absence of
+ * helping opportunities is challenging to control on JVMs, where
+ * GC and other activities can stall progress of tasks that in
+ * turn stall out many other dependent tasks, without us being
+ * able to determine whether they will ever require compensation.
+ * Even though work-stealing otherwise encounters little
+ * degradation in the presence of more threads than cores,
+ * aggressively adding new threads in such cases entails risk of
+ * unwanted positive feedback control loops in which more threads
+ * cause more dependent stalls (as well as delayed progress of
+ * unblocked threads to the point that we know they are available)
+ * leading to more situations requiring more threads, and so
+ * on. This aspect of control can be seen as an (analytically
+ * intractable) game with an opponent that may choose the worst
+ * (for us) active thread to stall at any time. We take several
+ * precautions to bound losses (and thus bound gains), mainly in
+ * methods tryCompensate and awaitJoin: (1) We only try
+ * compensation after attempting enough helping steps (measured
+ * via counting and timing) that we have already consumed the
+ * estimated cost of creating and activating a new thread. (2) We
+ * allow up to 50% of threads to be blocked before initially
+ * adding any others, and unless completely saturated, check that
+ * some work is available for a new worker before adding. Also, we
+ * create up to only 50% more threads until entering a mode that
+ * only adds a thread if all others are possibly blocked. All
+ * together, this means that we might be half as fast to react,
+ * and create half as many threads as possible in the ideal case,
+ * but present vastly fewer anomalies in all other cases compared
+ * to both more aggressive and more conservative alternatives.
+ *
+ * Style notes: There is a lot of representation-level coupling
+ * among classes ForkJoinPool, ForkJoinWorkerThread, and
+ * ForkJoinTask. The fields of WorkQueue maintain data structures
+ * managed by ForkJoinPool, so are directly accessed. There is
+ * little point trying to reduce this, since any associated future
+ * changes in representations will need to be accompanied by
+ * algorithmic changes anyway. Several methods intrinsically
+ * sprawl because they must accumulate sets of consistent reads of
+ * volatiles held in local variables. Methods signalWork() and
+ * scan() are the main bottlenecks, so are especially heavily
+ * micro-optimized/mangled. There are lots of inline assignments
+ * (of form "while ((local = field) != 0)") which are usually the
+ * simplest way to ensure the required read orderings (which are
+ * sometimes critical). This leads to a "C"-like style of listing
+ * declarations of these locals at the heads of methods or blocks.
+ * There are several occurrences of the unusual "do {} while
+ * (!cas...)" which is the simplest way to force an update of a
+ * CAS'ed variable. There are also other coding oddities that help
+ * some methods perform reasonably even when interpreted (not
+ * compiled).
+ *
+ * The order of declarations in this file is:
+ * (1) Static utility functions
+ * (2) Nested (static) classes
+ * (3) Static fields
+ * (4) Fields, along with constants used when unpacking some of them
+ * (5) Internal control methods
+ * (6) Callbacks and other support for ForkJoinTask methods
+ * (7) Exported methods
+ * (8) Static block initializing statics in minimally dependent order
*/
- /** Mask for packing and unpacking shorts */
- private static final int shortMask = 0xffff;
-
- /** Max pool size -- must be a power of two minus 1 */
- private static final int MAX_THREADS = 0x7FFF;
+ // Static utilities
- // placeholder for java.util.concurrent.RunnableFuture
- interface RunnableFuture<T> extends Runnable {
+ /**
+ * If there is a security manager, makes sure caller has
+ * permission to modify threads.
+ */
+ private static void checkPermission() {
+ SecurityManager security = System.getSecurityManager();
+ if (security != null)
+ security.checkPermission(modifyThreadPermission);
}
+ // Nested classes
+
/**
- * Factory for creating new ForkJoinWorkerThreads. A
- * ForkJoinWorkerThreadFactory must be defined and used for
- * ForkJoinWorkerThread subclasses that extend base functionality
- * or initialize threads with different contexts.
+ * Factory for creating new {@link ForkJoinWorkerThread}s.
+ * A {@code ForkJoinWorkerThreadFactory} must be defined and used
+ * for {@code ForkJoinWorkerThread} subclasses that extend base
+ * functionality or initialize threads with different contexts.
*/
public static interface ForkJoinWorkerThreadFactory {
/**
* Returns a new worker thread operating in the given pool.
*
* @param pool the pool this thread works in
- * @throws NullPointerException if pool is null;
+ * @throws NullPointerException if the pool is null
*/
public ForkJoinWorkerThread newThread(ForkJoinPool pool);
}
/**
- * Default ForkJoinWorkerThreadFactory implementation, creates a
+ * Default ForkJoinWorkerThreadFactory implementation; creates a
* new ForkJoinWorkerThread.
*/
- static class DefaultForkJoinWorkerThreadFactory
+ static class DefaultForkJoinWorkerThreadFactory
implements ForkJoinWorkerThreadFactory {
public ForkJoinWorkerThread newThread(ForkJoinPool pool) {
- try {
- return new ForkJoinWorkerThread(pool);
- } catch (OutOfMemoryError oom) {
- return null;
- }
+ return new ForkJoinWorkerThread(pool);
}
}
/**
- * Creates a new ForkJoinWorkerThread. This factory is used unless
- * overridden in ForkJoinPool constructors.
+ * A simple non-reentrant lock used for exclusion when managing
+ * queues and workers. We use a custom lock so that we can readily
+ * probe lock state in constructions that check among alternative
+ * actions. The lock is normally only very briefly held, and
+ * sometimes treated as a spinlock, but other usages block to
+ * reduce overall contention in those cases where locked code
+ * bodies perform allocation/resizing.
*/
- public static final ForkJoinWorkerThreadFactory
- defaultForkJoinWorkerThreadFactory =
- new DefaultForkJoinWorkerThreadFactory();
-
- /**
- * Permission required for callers of methods that may start or
- * kill threads.
- */
- private static final RuntimePermission modifyThreadPermission =
- new RuntimePermission("modifyThread");
+ static final class Mutex extends AbstractQueuedSynchronizer {
+ public final boolean tryAcquire(int ignore) {
+ return compareAndSetState(0, 1);
+ }
+ public final boolean tryRelease(int ignore) {
+ setState(0);
+ return true;
+ }
+ public final void lock() { acquire(0); }
+ public final void unlock() { release(0); }
+ public final boolean isHeldExclusively() { return getState() == 1; }
+ public final Condition newCondition() { return new ConditionObject(); }
+ }
/**
- * If there is a security manager, makes sure caller has
- * permission to modify threads.
+ * Class for artificial tasks that are used to replace the target
+ * of local joins if they are removed from an interior queue slot
+ * in WorkQueue.tryRemoveAndExec. We don't need the proxy to
+ * actually do anything beyond having a unique identity.
*/
- private static void checkPermission() {
- SecurityManager security = System.getSecurityManager();
- if (security != null)
- security.checkPermission(modifyThreadPermission);
+ static final class EmptyTask extends ForkJoinTask<Void> {
+ EmptyTask() { status = ForkJoinTask.NORMAL; } // force done
+ public final Void getRawResult() { return null; }
+ public final void setRawResult(Void x) {}
+ public final boolean exec() { return true; }
}
/**
- * Generator for assigning sequence numbers as pool names.
- */
- private static final AtomicInteger poolNumberGenerator =
- new AtomicInteger();
+ * Queues supporting work-stealing as well as external task
+ * submission. See above for main rationale and algorithms.
+ * Implementation relies heavily on "Unsafe" intrinsics
+ * and selective use of "volatile":
+ *
+ * Field "base" is the index (mod array.length) of the least valid
+ * queue slot, which is always the next position to steal (poll)
+ * from if nonempty. Reads and writes require volatile orderings
+ * but not CAS, because updates are only performed after slot
+ * CASes.
+ *
+ * Field "top" is the index (mod array.length) of the next queue
+ * slot to push to or pop from. It is written only by owner thread
+ * for push, or under lock for trySharedPush, and accessed by
+ * other threads only after reading (volatile) base. Both top and
+ * base are allowed to wrap around on overflow, but (top - base)
+ * (or more commonly -(base - top) to force volatile read of base
+ * before top) still estimates size.
+ *
+ * The array slots are read and written using the emulation of
+ * volatiles/atomics provided by Unsafe. Insertions must in
+ * general use putOrderedObject as a form of releasing store to
+ * ensure that all writes to the task object are ordered before
+ * its publication in the queue. (Although we can avoid one case
+ * of this when locked in trySharedPush.) All removals entail a
+ * CAS to null. The array is always a power of two. To ensure
+ * safety of Unsafe array operations, all accesses perform
+ * explicit null checks and implicit bounds checks via
+ * power-of-two masking.
+ *
+ * In addition to basic queuing support, this class contains
+ * fields described elsewhere to control execution. It turns out
+ * to work better memory-layout-wise to include them in this
+ * class rather than a separate class.
+ *
+ * Performance on most platforms is very sensitive to placement of
+ * instances of both WorkQueues and their arrays -- we absolutely
+ * do not want multiple WorkQueue instances or multiple queue
+ * arrays sharing cache lines. (It would be best for queue objects
+ * and their arrays to share, but there is nothing available to
+ * help arrange that). Unfortunately, because they are recorded
+ * in a common array, WorkQueue instances are often moved to be
+ * adjacent by garbage collectors. To reduce impact, we use field
+ * padding that works OK on common platforms; this effectively
+ * trades off slightly slower average field access for the sake of
+ * avoiding really bad worst-case access. (Until better JVM
+ * support is in place, this padding is dependent on transient
+ * properties of JVM field layout rules.) We also take care in
+ * allocating, sizing and resizing the array. Non-shared queue
+ * arrays are initialized (via method growArray) by workers before
+ * use. Others are allocated on first use.
+ */
+ static final class WorkQueue {
+ /**
+ * Capacity of work-stealing queue array upon initialization.
+ * Must be a power of two; at least 4, but should be larger to
+ * reduce or eliminate cacheline sharing among queues.
+ * Currently, it is much larger, as a partial workaround for
+ * the fact that JVMs often place arrays in locations that
+ * share GC bookkeeping (especially cardmarks) such that
+ * per-write accesses encounter serious memory contention.
+ */
+ static final int INITIAL_QUEUE_CAPACITY = 1 << 13;
- /**
- * Array holding all worker threads in the pool. Initialized upon
- * first use. Array size must be a power of two. Updates and
- * replacements are protected by workerLock, but it is always kept
- * in a consistent enough state to be randomly accessed without
- * locking by workers performing work-stealing.
- */
- public volatile ForkJoinWorkerThread[] workers;
+ /**
+ * Maximum size for queue arrays. Must be a power of two less
+ * than or equal to 1 << (31 - width of array entry) to ensure
+ * lack of wraparound of index calculations, but defined to a
+ * value a bit less than this to help users trap runaway
+ * programs before saturating systems.
+ */
+ static final int MAXIMUM_QUEUE_CAPACITY = 1 << 26; // 64M
+
+ volatile long totalSteals; // cumulative number of steals
+ int seed; // for random scanning; initialize nonzero
+ volatile int eventCount; // encoded inactivation count; < 0 if inactive
+ int nextWait; // encoded record of next event waiter
+ int rescans; // remaining scans until block
+ int nsteals; // top-level task executions since last idle
+ final int mode; // lifo, fifo, or shared
+ int poolIndex; // index of this queue in pool (or 0)
+ int stealHint; // index of most recent known stealer
+ volatile int runState; // 1: locked, -1: terminate; else 0
+ volatile int base; // index of next slot for poll
+ int top; // index of next slot for push
+ ForkJoinTask<?>[] array; // the elements (initially unallocated)
+ final ForkJoinPool pool; // the containing pool (may be null)
+ final ForkJoinWorkerThread owner; // owning thread or null if shared
+ volatile Thread parker; // == owner during call to park; else null
+ ForkJoinTask<?> currentJoin; // task being joined in awaitJoin
+ ForkJoinTask<?> currentSteal; // current non-local task being executed
+ // Heuristic padding to ameliorate unfortunate memory placements
+ Object p00, p01, p02, p03, p04, p05, p06, p07;
+ Object p08, p09, p0a, p0b, p0c, p0d, p0e;
+
+ WorkQueue(ForkJoinPool pool, ForkJoinWorkerThread owner, int mode) {
+ this.mode = mode;
+ this.pool = pool;
+ this.owner = owner;
+ // Place indices in the center of array (that is not yet allocated)
+ base = top = INITIAL_QUEUE_CAPACITY >>> 1;
+ }
- /**
- * Lock protecting access to workers.
- */
- private final ReentrantLock workerLock;
+ /**
+ * Returns the approximate number of tasks in the queue.
+ */
+ final int queueSize() {
+ int n = base - top; // non-owner callers must read base first
+ return (n >= 0) ? 0 : -n; // ignore transient negative
+ }
- /**
- * Condition for awaitTermination.
- */
- private final Condition termination;
+ /**
+ * Provides a more accurate estimate of whether this queue has
+ * any tasks than does queueSize, by checking whether a
+ * near-empty queue has at least one unclaimed task.
+ */
+ final boolean isEmpty() {
+ ForkJoinTask<?>[] a; int m, s;
+ int n = base - (s = top);
+ return (n >= 0 ||
+ (n == -1 &&
+ ((a = array) == null ||
+ (m = a.length - 1) < 0 ||
+ U.getObjectVolatile
+ (a, ((m & (s - 1)) << ASHIFT) + ABASE) == null)));
+ }
+
+ /**
+ * Pushes a task. Call only by owner in unshared queues.
+ *
+ * @param task the task. Caller must ensure non-null.
+ * @throws RejectedExecutionException if array cannot be resized
+ */
+ final void push(ForkJoinTask<?> task) {
+ ForkJoinTask<?>[] a; ForkJoinPool p;
+ int s = top, m, n;
+ if ((a = array) != null) { // ignore if queue removed
+ U.putOrderedObject
+ (a, (((m = a.length - 1) & s) << ASHIFT) + ABASE, task);
+ if ((n = (top = s + 1) - base) <= 2) {
+ if ((p = pool) != null)
+ p.signalWork();
+ }
+ else if (n >= m)
+ growArray(true);
+ }
+ }
+
+ /**
+ * Pushes a task if lock is free and array is either big
+ * enough or can be resized to be big enough.
+ *
+ * @param task the task. Caller must ensure non-null.
+ * @return true if submitted
+ */
+ final boolean trySharedPush(ForkJoinTask<?> task) {
+ boolean submitted = false;
+ if (runState == 0 && U.compareAndSwapInt(this, RUNSTATE, 0, 1)) {
+ ForkJoinTask<?>[] a = array;
+ int s = top;
+ try {
+ if ((a != null && a.length > s + 1 - base) ||
+ (a = growArray(false)) != null) { // must presize
+ int j = (((a.length - 1) & s) << ASHIFT) + ABASE;
+ U.putObject(a, (long)j, task); // don't need "ordered"
+ top = s + 1;
+ submitted = true;
+ }
+ } finally {
+ runState = 0; // unlock
+ }
+ }
+ return submitted;
+ }
+
+ /**
+ * Takes next task, if one exists, in LIFO order. Call only
+ * by owner in unshared queues. (We do not have a shared
+ * version of this method because it is never needed.)
+ */
+ final ForkJoinTask<?> pop() {
+ ForkJoinTask<?> t; int m;
+ ForkJoinTask<?>[] a = array;
+ if (a != null && (m = a.length - 1) >= 0) {
+ for (int s; (s = top - 1) - base >= 0;) {
+ int j = ((m & s) << ASHIFT) + ABASE;
+ if ((t = (ForkJoinTask<?>)U.getObjectVolatile(a, j)) == null)
+ break;
+ if (U.compareAndSwapObject(a, j, t, null)) {
+ top = s;
+ return t;
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Takes a task in FIFO order if b is base of queue and a task
+ * can be claimed without contention. Specialized versions
+ * appear in ForkJoinPool methods scan and tryHelpStealer.
+ */
+ final ForkJoinTask<?> pollAt(int b) {
+ ForkJoinTask<?> t; ForkJoinTask<?>[] a;
+ if ((a = array) != null) {
+ int j = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ if ((t = (ForkJoinTask<?>)U.getObjectVolatile(a, j)) != null &&
+ base == b &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ base = b + 1;
+ return t;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Takes next task, if one exists, in FIFO order.
+ */
+ final ForkJoinTask<?> poll() {
+ ForkJoinTask<?>[] a; int b; ForkJoinTask<?> t;
+ while ((b = base) - top < 0 && (a = array) != null) {
+ int j = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ t = (ForkJoinTask<?>)U.getObjectVolatile(a, j);
+ if (t != null) {
+ if (base == b &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ base = b + 1;
+ return t;
+ }
+ }
+ else if (base == b) {
+ if (b + 1 == top)
+ break;
+ Thread.yield(); // wait for lagging update
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Takes next task, if one exists, in order specified by mode.
+ */
+ final ForkJoinTask<?> nextLocalTask() {
+ return mode == 0 ? pop() : poll();
+ }
+
+ /**
+ * Returns next task, if one exists, in order specified by mode.
+ */
+ final ForkJoinTask<?> peek() {
+ ForkJoinTask<?>[] a = array; int m;
+ if (a == null || (m = a.length - 1) < 0)
+ return null;
+ int i = mode == 0 ? top - 1 : base;
+ int j = ((i & m) << ASHIFT) + ABASE;
+ return (ForkJoinTask<?>)U.getObjectVolatile(a, j);
+ }
+
+ /**
+ * Pops the given task only if it is at the current top.
+ */
+ final boolean tryUnpush(ForkJoinTask<?> t) {
+ ForkJoinTask<?>[] a; int s;
+ if ((a = array) != null && (s = top) != base &&
+ U.compareAndSwapObject
+ (a, (((a.length - 1) & --s) << ASHIFT) + ABASE, t, null)) {
+ top = s;
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Polls the given task only if it is at the current base.
+ */
+ final boolean pollFor(ForkJoinTask<?> task) {
+ ForkJoinTask<?>[] a; int b;
+ if ((b = base) - top < 0 && (a = array) != null) {
+ int j = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ if (U.getObjectVolatile(a, j) == task && base == b &&
+ U.compareAndSwapObject(a, j, task, null)) {
+ base = b + 1;
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * If present, removes from queue and executes the given task, or
+ * any other cancelled task. Returns (true) immediately on any CAS
+ * or consistency check failure so caller can retry.
+ *
+ * @return false if no progress can be made
+ */
+ final boolean tryRemoveAndExec(ForkJoinTask<?> task) {
+ boolean removed = false, empty = true, progress = true;
+ ForkJoinTask<?>[] a; int m, s, b, n;
+ if ((a = array) != null && (m = a.length - 1) >= 0 &&
+ (n = (s = top) - (b = base)) > 0) {
+ for (ForkJoinTask<?> t;;) { // traverse from s to b
+ int j = ((--s & m) << ASHIFT) + ABASE;
+ t = (ForkJoinTask<?>)U.getObjectVolatile(a, j);
+ if (t == null) // inconsistent length
+ break;
+ else if (t == task) {
+ if (s + 1 == top) { // pop
+ if (!U.compareAndSwapObject(a, j, task, null))
+ break;
+ top = s;
+ removed = true;
+ }
+ else if (base == b) // replace with proxy
+ removed = U.compareAndSwapObject(a, j, task,
+ new EmptyTask());
+ break;
+ }
+ else if (t.status >= 0)
+ empty = false;
+ else if (s + 1 == top) { // pop and throw away
+ if (U.compareAndSwapObject(a, j, t, null))
+ top = s;
+ break;
+ }
+ if (--n == 0) {
+ if (!empty && base == b)
+ progress = false;
+ break;
+ }
+ }
+ }
+ if (removed)
+ task.doExec();
+ return progress;
+ }
+
+ /**
+ * Initializes or doubles the capacity of array. Call either
+ * by owner or with lock held -- it is OK for base, but not
+ * top, to move while resizings are in progress.
+ *
+ * @param rejectOnFailure if true, throw exception if capacity
+ * exceeded (relayed ultimately to user); else return null.
+ */
+ final ForkJoinTask<?>[] growArray(boolean rejectOnFailure) {
+ ForkJoinTask<?>[] oldA = array;
+ int size = oldA != null ? oldA.length << 1 : INITIAL_QUEUE_CAPACITY;
+ if (size <= MAXIMUM_QUEUE_CAPACITY) {
+ int oldMask, t, b;
+ ForkJoinTask<?>[] a = array = new ForkJoinTask<?>[size];
+ if (oldA != null && (oldMask = oldA.length - 1) >= 0 &&
+ (t = top) - (b = base) > 0) {
+ int mask = size - 1;
+ do {
+ ForkJoinTask<?> x;
+ int oldj = ((b & oldMask) << ASHIFT) + ABASE;
+ int j = ((b & mask) << ASHIFT) + ABASE;
+ x = (ForkJoinTask<?>)U.getObjectVolatile(oldA, oldj);
+ if (x != null &&
+ U.compareAndSwapObject(oldA, oldj, x, null))
+ U.putObjectVolatile(a, j, x);
+ } while (++b != t);
+ }
+ return a;
+ }
+ else if (!rejectOnFailure)
+ return null;
+ else
+ throw new RejectedExecutionException("Queue capacity exceeded");
+ }
+
+ /**
+ * Removes and cancels all known tasks, ignoring any exceptions.
+ */
+ final void cancelAll() {
+ ForkJoinTask.cancelIgnoringExceptions(currentJoin);
+ ForkJoinTask.cancelIgnoringExceptions(currentSteal);
+ for (ForkJoinTask<?> t; (t = poll()) != null; )
+ ForkJoinTask.cancelIgnoringExceptions(t);
+ }
+
+ /**
+ * Computes next value for random probes. Scans don't require
+ * a very high quality generator, but also not a crummy one.
+ * Marsaglia xor-shift is cheap and works well enough. Note:
+ * This is manually inlined in its usages in ForkJoinPool to
+ * avoid writes inside busy scan loops.
+ */
+ final int nextSeed() {
+ int r = seed;
+ r ^= r << 13;
+ r ^= r >>> 17;
+ return seed = r ^= r << 5;
+ }
+
+ // Execution methods
+
+ /**
+ * Removes and runs tasks until empty, using local mode
+ * ordering. Normally called only after checking for apparent
+ * non-emptiness.
+ */
+ final void runLocalTasks() {
+ // hoist checks from repeated pop/poll
+ ForkJoinTask<?>[] a; int m;
+ if ((a = array) != null && (m = a.length - 1) >= 0) {
+ if (mode == 0) {
+ for (int s; (s = top - 1) - base >= 0;) {
+ int j = ((m & s) << ASHIFT) + ABASE;
+ ForkJoinTask<?> t =
+ (ForkJoinTask<?>)U.getObjectVolatile(a, j);
+ if (t != null) {
+ if (U.compareAndSwapObject(a, j, t, null)) {
+ top = s;
+ t.doExec();
+ }
+ }
+ else
+ break;
+ }
+ }
+ else {
+ for (int b; (b = base) - top < 0;) {
+ int j = ((m & b) << ASHIFT) + ABASE;
+ ForkJoinTask<?> t =
+ (ForkJoinTask<?>)U.getObjectVolatile(a, j);
+ if (t != null) {
+ if (base == b &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ base = b + 1;
+ t.doExec();
+ }
+ } else if (base == b) {
+ if (b + 1 == top)
+ break;
+ Thread.yield(); // wait for lagging update
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Executes a top-level task and any local tasks remaining
+ * after execution.
+ *
+ * @return true unless terminating
+ */
+ final boolean runTask(ForkJoinTask<?> t) {
+ boolean alive = true;
+ if (t != null) {
+ currentSteal = t;
+ t.doExec();
+ if (top != base) // conservative guard
+ runLocalTasks();
+ ++nsteals;
+ currentSteal = null;
+ }
+ else if (runState < 0) // terminating
+ alive = false;
+ return alive;
+ }
+
+ /**
+ * Executes a non-top-level (stolen) task.
+ */
+ final void runSubtask(ForkJoinTask<?> t) {
+ if (t != null) {
+ ForkJoinTask<?> ps = currentSteal;
+ currentSteal = t;
+ t.doExec();
+ currentSteal = ps;
+ }
+ }
+
+ /**
+ * Returns true if owned and not known to be blocked.
+ */
+ final boolean isApparentlyUnblocked() {
+ Thread wt; Thread.State s;
+ return (eventCount >= 0 &&
+ (wt = owner) != null &&
+ (s = wt.getState()) != Thread.State.BLOCKED &&
+ s != Thread.State.WAITING &&
+ s != Thread.State.TIMED_WAITING);
+ }
+
+ /**
+ * If this queue is owned and its owner is not already interrupted,
+ * tries to interrupt and/or unpark the owner, ignoring exceptions.
+ */
+ final void interruptOwner() {
+ Thread wt, p;
+ if ((wt = owner) != null && !wt.isInterrupted()) {
+ try {
+ wt.interrupt();
+ } catch (SecurityException ignore) {
+ }
+ }
+ if ((p = parker) != null)
+ U.unpark(p);
+ }
+
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe U;
+ private static final long RUNSTATE;
+ private static final int ABASE;
+ private static final int ASHIFT;
+ static {
+ int s;
+ try {
+ U = getUnsafe();
+ Class<?> k = WorkQueue.class;
+ Class<?> ak = ForkJoinTask[].class;
+ RUNSTATE = U.objectFieldOffset
+ (k.getDeclaredField("runState"));
+ ABASE = U.arrayBaseOffset(ak);
+ s = U.arrayIndexScale(ak);
+ } catch (Exception e) {
+ throw new Error(e);
+ }
+ if ((s & (s-1)) != 0)
+ throw new Error("data type scale not a power of two");
+ ASHIFT = 31 - Integer.numberOfLeadingZeros(s);
+ }
+ }
/**
- * The uncaught exception handler used when any worker
- * abrupty terminates
- */
- private Thread.UncaughtExceptionHandler ueh;
+ * Per-thread records for threads that submit to pools. Currently
+ * holds only pseudo-random seed / index that is used to choose
+ * submission queues in method doSubmit. In the future, this may
+ * also incorporate a means to implement different task rejection
+ * and resubmission policies.
+ *
+ * Seeds for submitters and workers/workQueues work in basically
+ * the same way but are initialized and updated using slightly
+ * different mechanics. Both are initialized using the same
+ * approach as in class ThreadLocal, where successive values are
+ * unlikely to collide with previous values. This is done during
+ * registration for workers, but requires a separate AtomicInteger
+ * for submitters. Seeds are then randomly modified upon
+ * collisions using xorshifts, which requires a non-zero seed.
+ */
+ static final class Submitter {
+ int seed;
+ Submitter() {
+ int s = nextSubmitterSeed.getAndAdd(SEED_INCREMENT);
+ seed = (s == 0) ? 1 : s; // ensure non-zero
+ }
+ }
+
+ /** ThreadLocal class for Submitters */
+ static final class ThreadSubmitter extends ThreadLocal<Submitter> {
+ public Submitter initialValue() { return new Submitter(); }
+ }
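+
+ // A small sketch (demo method assumed, not in the original):
+ // seeds spaced SEED_INCREMENT apart differ in many bits, so
+ // successive submitters hash to well-separated slots of a small
+ // power-of-two table, unlike consecutive integers would.
+ static void printSeedSpread() {
+ int s = 0;
+ for (int i = 0; i < 8; ++i) {
+ s += 0x61c88647; // SEED_INCREMENT
+ System.out.println((s >>> 24) & 63); // well-spread slot indices
+ }
+ }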
+
+ // static fields (initialized in static initializer below)
/**
- * Creation factory for worker threads.
+ * Creates a new ForkJoinWorkerThread. This factory is used unless
+ * overridden in ForkJoinPool constructors.
*/
- private final ForkJoinWorkerThreadFactory factory;
+ public static final ForkJoinWorkerThreadFactory
+ defaultForkJoinWorkerThreadFactory;
/**
- * Head of stack of threads that were created to maintain
- * parallelism when other threads blocked, but have since
- * suspended when the parallelism level rose.
+ * Generator for assigning sequence numbers as pool names.
*/
- private volatile WaitQueueNode spareStack;
+ private static final AtomicInteger poolNumberGenerator;
/**
- * Sum of per-thread steal counts, updated only when threads are
- * idle or terminating.
+ * Generator for initial hashes/seeds for submitters. Accessed by
+ * Submitter class constructor.
*/
- private final AtomicLong stealCount;
+ static final AtomicInteger nextSubmitterSeed;
/**
- * Queue for external submissions.
+ * Permission required for callers of methods that may start or
+ * kill threads.
*/
- private final LinkedTransferQueue<ForkJoinTask<?>> submissionQueue;
+ private static final RuntimePermission modifyThreadPermission;
/**
- * Head of Treiber stack for barrier sync. See below for explanation
+ * Per-thread submission bookkeeping. Shared across all pools
+ * to reduce ThreadLocal pollution and because random motion
+ * to avoid contention in one pool is likely to hold for others.
*/
- private volatile WaitQueueNode syncStack;
+ private static final ThreadSubmitter submitters;
+
+ // static constants
/**
- * The count for event barrier
+ * The wakeup interval (in nanoseconds) for a worker waiting for a
+ * task when the pool is quiescent to instead try to shrink the
+ * number of workers. The exact value does not matter too
+ * much. It must be short enough to release resources during
+ * sustained periods of idleness, but not so short that threads
+ * are continually re-created.
*/
- private volatile long eventCount;
+ private static final long SHRINK_RATE =
+ 4L * 1000L * 1000L * 1000L; // 4 seconds
/**
- * Pool number, just for assigning useful names to worker threads
+ * The timeout value for attempted shrinkage, includes
+ * some slop to cope with system timer imprecision.
*/
- private final int poolNumber;
+ private static final long SHRINK_TIMEOUT = SHRINK_RATE - (SHRINK_RATE / 10);
/**
- * The maximum allowed pool size
+ * The maximum stolen->joining link depth allowed in method
+ * tryHelpStealer. Must be a power of two. This value also
+ * controls the maximum number of times to try to help join a task
+ * without any apparent progress or change in pool state before
+ * giving up and blocking (see awaitJoin). Depths for legitimate
+ * chains are unbounded, but we use a fixed constant to avoid
+ * (otherwise unchecked) cycles and to bound staleness of
+ * traversal parameters at the expense of sometimes blocking when
+ * we could be helping.
*/
- private volatile int maxPoolSize;
+ private static final int MAX_HELP = 32;
/**
- * The desired parallelism level, updated only under workerLock.
+ * Secondary time-based bound (in nanosecs) for helping attempts
+ * before trying compensated blocking in awaitJoin. Used in
+ * conjunction with MAX_HELP to reduce variance due to different
+ * polling rates associated with different helping options. The
+ * value should roughly approximate the time required to create
+ * and/or activate a worker thread.
*/
- private volatile int parallelism;
+ private static final long COMPENSATION_DELAY = 100L * 1000L; // 0.1 millisec
/**
- * True if use local fifo, not default lifo, for local polling
+ * Increment for seed generators. See class ThreadLocal for
+ * explanation.
*/
- private volatile boolean locallyFifo;
+ private static final int SEED_INCREMENT = 0x61c88647;
/**
- * Holds number of total (i.e., created and not yet terminated)
- * and running (i.e., not blocked on joins or other managed sync)
- * threads, packed into one int to ensure consistent snapshot when
- * making decisions about creating and suspending spare
- * threads. Updated only by CAS. Note: CASes in
- * updateRunningCount and preJoin running active count is in low
- * word, so need to be modified if this changes
- */
- private volatile int workerCounts;
+ * Bits and masks for control variables
+ *
+ * Field ctl is a long packed with:
+ * AC: Number of active running workers minus target parallelism (16 bits)
+ * TC: Number of total workers minus target parallelism (16 bits)
+ * ST: true if pool is terminating (1 bit)
+ * EC: the wait count of top waiting thread (15 bits)
+ * ID: poolIndex of top of Treiber stack of waiters (16 bits)
+ *
+ * When convenient, we can extract the upper 32 bits of counts and
+ * the lower 32 bits of queue state, u = (int)(ctl >>> 32) and e =
+ * (int)ctl. The ec field is never accessed alone, but always
+ * together with id and st. The offsetting of counts by the target
+ * parallelism and the positioning of fields make it possible to
+ * perform the most common checks via sign tests of fields: when
+ * ac is negative, there are not enough active workers, when tc is
+ * negative, there are not enough total workers, and when e is
+ * negative, the pool is terminating. To deal with these possibly
+ * negative fields, we use casts in and out of "short" and/or
+ * signed shifts to maintain signedness.
+ *
+ * When a thread is queued (inactivated), its eventCount field is
+ * set negative, which is the only way to tell if a worker is
+ * prevented from executing tasks, even though it must continue to
+ * scan for them to avoid queuing races. Note however that
+ * eventCount updates lag releases so usage requires care.
+ *
+ * Field runState is an int packed with:
+ * SHUTDOWN: true if shutdown is enabled (1 bit)
+ * SEQ: a sequence number updated upon (de)registering workers (30 bits)
+ * INIT: set true after workQueues array construction (1 bit)
+ *
+ * The sequence number enables simple consistency checks:
+ * Staleness of read-only operations on the workQueues array can
+ * be checked by comparing runState before vs after the reads.
+ */
+
+ // bit positions/shifts for fields
+ private static final int AC_SHIFT = 48;
+ private static final int TC_SHIFT = 32;
+ private static final int ST_SHIFT = 31;
+ private static final int EC_SHIFT = 16;
+
+ // bounds
+ private static final int SMASK = 0xffff; // short bits
+ private static final int MAX_CAP = 0x7fff; // max #workers - 1
+ private static final int SQMASK = 0xfffe; // even short bits
+ private static final int SHORT_SIGN = 1 << 15;
+ private static final int INT_SIGN = 1 << 31;
+
+ // masks
+ private static final long STOP_BIT = 0x0001L << ST_SHIFT;
+ private static final long AC_MASK = ((long)SMASK) << AC_SHIFT;
+ private static final long TC_MASK = ((long)SMASK) << TC_SHIFT;
+
+ // units for incrementing and decrementing
+ private static final long TC_UNIT = 1L << TC_SHIFT;
+ private static final long AC_UNIT = 1L << AC_SHIFT;
+
+ // masks and units for dealing with u = (int)(ctl >>> 32)
+ private static final int UAC_SHIFT = AC_SHIFT - 32;
+ private static final int UTC_SHIFT = TC_SHIFT - 32;
+ private static final int UAC_MASK = SMASK << UAC_SHIFT;
+ private static final int UTC_MASK = SMASK << UTC_SHIFT;
+ private static final int UAC_UNIT = 1 << UAC_SHIFT;
+ private static final int UTC_UNIT = 1 << UTC_SHIFT;
+
+ // masks and units for dealing with e = (int)ctl
+ private static final int E_MASK = 0x7fffffff; // no STOP_BIT
+ private static final int E_SEQ = 1 << EC_SHIFT;
+
+ // runState bits
+ private static final int SHUTDOWN = 1 << 31;
+
+ // access mode for WorkQueue
+ static final int LIFO_QUEUE = 0;
+ static final int FIFO_QUEUE = 1;
+ static final int SHARED_QUEUE = -1;
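+
+ // Decoding sketch for the ctl layout described above (demo method
+ // assumed, not in the original): with target parallelism p, the
+ // packed fields come back out as sign-corrected counts, and the
+ // common checks reduce to sign tests.
+ static void decodeCtl(long c, int p) {
+ int ac = (int)(c >> AC_SHIFT) + p; // active workers
+ int tc = (short)(c >>> TC_SHIFT) + p; // total workers
+ int e = (int)c; // queue state; e < 0 => terminating
+ boolean tooFewActive = (int)(c >> AC_SHIFT) < 0; // ac field < 0
+ boolean tooFewTotal = ((int)(c >>> 32) & SHORT_SIGN) != 0; // tc field < 0
+ }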
+
+ // Instance fields
- private static int totalCountOf(int s) { return s >>> 16; }
- private static int runningCountOf(int s) { return s & shortMask; }
- private static int workerCountsFor(int t, int r) { return (t << 16) + r; }
+ /*
+ * Field layout order in this class tends to matter more than one
+ * would like. Runtime layout order is only loosely related to
+ * declaration order and may differ across JVMs, but the following
+ * empirically works OK on current JVMs.
+ */
+
+ volatile long ctl; // main pool control
+ final int parallelism; // parallelism level
+ final int localMode; // per-worker scheduling mode
+ final int submitMask; // submit queue index bound
+ int nextSeed; // for initializing worker seeds
+ volatile int runState; // shutdown status and seq
+ WorkQueue[] workQueues; // main registry
+ final Mutex lock; // for registration
+ final Condition termination; // for awaitTermination
+ final ForkJoinWorkerThreadFactory factory; // factory for new workers
+ final Thread.UncaughtExceptionHandler ueh; // per-worker UEH
+ final AtomicLong stealCount; // collect counts when terminated
+ final AtomicInteger nextWorkerNumber; // to create worker name string
+ final String workerNamePrefix; // to create worker name string
+
+ // Creating, registering, and deregistering workers
+
+ /**
+ * Tries to create and start a worker.
+ */
+ private void addWorker() {
+ Throwable ex = null;
+ ForkJoinWorkerThread wt = null;
+ try {
+ if ((wt = factory.newThread(this)) != null) {
+ wt.start();
+ return;
+ }
+ } catch (Throwable e) {
+ ex = e;
+ }
+ deregisterWorker(wt, ex); // adjust counts etc on failure
+ }
/**
- * Add delta (which may be negative) to running count. This must
- * be called before (with negative arg) and after (with positive)
- * any managed synchronization (i.e., mainly, joins)
- * @param delta the number to add
+ * Callback from ForkJoinWorkerThread constructor to assign a
+ * public name. This must be separate from registerWorker because
+ * it is called during the "super" constructor call in
+ * ForkJoinWorkerThread.
*/
- final void updateRunningCount(int delta) {
- int s;
- do;while (!casWorkerCounts(s = workerCounts, s + delta));
+ final String nextWorkerName() {
+ return workerNamePrefix.concat
+ (Integer.toString(nextWorkerNumber.addAndGet(1)));
}
/**
- * Add delta (which may be negative) to both total and running
- * count. This must be called upon creation and termination of
- * worker threads.
- * @param delta the number to add
+ * Callback from ForkJoinWorkerThread constructor to establish its
+ * poolIndex and record its WorkQueue. To avoid scanning bias due
+ * to packing entries in front of the workQueues array, we treat
+ * the array as a simple power-of-two hash table using per-thread
+ * seed as hash, expanding as needed.
+ *
+ * @param w the worker's queue
*/
- private void updateWorkerCount(int delta) {
- int d = delta + (delta << 16); // add to both lo and hi parts
- int s;
- do;while (!casWorkerCounts(s = workerCounts, s + d));
+ final void registerWorker(WorkQueue w) {
+ Mutex lock = this.lock;
+ lock.lock();
+ try {
+ WorkQueue[] ws = workQueues;
+ if (w != null && ws != null) { // skip on shutdown/failure
+ int rs, n;
+ while ((n = ws.length) < // ensure can hold total
+ (parallelism + (short)(ctl >>> TC_SHIFT) << 1))
+ workQueues = ws = Arrays.copyOf(ws, n << 1);
+ int m = n - 1;
+ int s = nextSeed += SEED_INCREMENT; // rarely-colliding sequence
+ w.seed = (s == 0) ? 1 : s; // ensure non-zero seed
+ int r = (s << 1) | 1; // use odd-numbered indices
+ while (ws[r &= m] != null) // step by approx half size
+ r += ((n >>> 1) & SQMASK) + 2;
+ w.eventCount = w.poolIndex = r; // establish before recording
+ ws[r] = w; // also update seq
+ runState = ((rs = runState) & SHUTDOWN) | ((rs + 2) & ~SHUTDOWN);
+ }
+ } finally {
+ lock.unlock();
+ }
}
/**
- * Lifecycle control. High word contains runState, low word
- * contains the number of workers that are (probably) executing
- * tasks. This value is atomically incremented before a worker
- * gets a task to run, and decremented when worker has no tasks
- * and cannot find any. These two fields are bundled together to
- * support correct termination triggering. Note: activeCount
- * CAS'es cheat by assuming active count is in low word, so need
- * to be modified if this changes
- */
- private volatile int runControl;
-
- // RunState values. Order among values matters
- private static final int RUNNING = 0;
- private static final int SHUTDOWN = 1;
- private static final int TERMINATING = 2;
- private static final int TERMINATED = 3;
+ * Final callback from terminating worker, as well as upon failure
+ * to construct or start a worker in addWorker. Removes record of
+ * worker from array, and adjusts counts. If pool is shutting
+ * down, tries to complete termination.
+ *
+ * @param wt the worker thread or null if addWorker failed
+ * @param ex the exception causing failure, or null if none
+ */
+ final void deregisterWorker(ForkJoinWorkerThread wt, Throwable ex) {
+ Mutex lock = this.lock;
+ WorkQueue w = null;
+ if (wt != null && (w = wt.workQueue) != null) {
+ w.runState = -1; // ensure runState is set
+ stealCount.getAndAdd(w.totalSteals + w.nsteals);
+ int idx = w.poolIndex;
+ lock.lock();
+ try { // remove record from array
+ WorkQueue[] ws = workQueues;
+ if (ws != null && idx >= 0 && idx < ws.length && ws[idx] == w)
+ ws[idx] = null;
+ } finally {
+ lock.unlock();
+ }
+ }
- private static int runStateOf(int c) { return c >>> 16; }
- private static int activeCountOf(int c) { return c & shortMask; }
- private static int runControlFor(int r, int a) { return (r << 16) + a; }
+ long c; // adjust ctl counts
+ do {} while (!U.compareAndSwapLong
+ (this, CTL, c = ctl, (((c - AC_UNIT) & AC_MASK) |
+ ((c - TC_UNIT) & TC_MASK) |
+ (c & ~(AC_MASK|TC_MASK)))));
+
+ if (!tryTerminate(false, false) && w != null) {
+ w.cancelAll(); // cancel remaining tasks
+ if (w.array != null) // suppress signal if never ran
+ signalWork(); // wake up or create replacement
+ if (ex == null) // help clean refs on way out
+ ForkJoinTask.helpExpungeStaleExceptions();
+ }
- /**
- * Try incrementing active count; fail on contention. Called by
- * workers before/during executing tasks.
- * @return true on success;
- */
- final boolean tryIncrementActiveCount() {
- int c = runControl;
- return casRunControl(c, c+1);
+ if (ex != null) // rethrow
+ U.throwException(ex);
}
+
+ // Submissions
+
/**
- * Try decrementing active count; fail on contention.
- * Possibly trigger termination on success
- * Called by workers when they can't find tasks.
- * @return true on success
- */
- final boolean tryDecrementActiveCount() {
- int c = runControl;
- int nextc = c - 1;
- if (!casRunControl(c, nextc))
- return false;
- if (canTerminateOnShutdown(nextc))
- terminateOnShutdown();
- return true;
+ * Unless shutting down, adds the given task to a submission queue
+ * at the submitter's current queue index (modulo submission
+ * range). If no queue exists at the index, one is created. If
+ * the queue is busy, another index is randomly chosen. The
+ * submitMask bounds the effective number of queues to the
+ * (nearest power of two for) parallelism level.
+ *
+ * @param task the task. Caller must ensure non-null.
+ */
+ private void doSubmit(ForkJoinTask<?> task) {
+ Submitter s = submitters.get();
+ for (int r = s.seed, m = submitMask;;) {
+ WorkQueue[] ws; WorkQueue q;
+ int k = r & m & SQMASK; // use only even indices
+ if (runState < 0 || (ws = workQueues) == null || ws.length <= k)
+ throw new RejectedExecutionException(); // shutting down
+ else if ((q = ws[k]) == null) { // create new queue
+ WorkQueue nq = new WorkQueue(this, null, SHARED_QUEUE);
+ Mutex lock = this.lock; // construct outside lock
+ lock.lock();
+ try { // recheck under lock
+ int rs = runState; // to update seq
+ if (ws == workQueues && ws[k] == null) {
+ ws[k] = nq;
+ runState = ((rs & SHUTDOWN) | ((rs + 2) & ~SHUTDOWN));
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+ else if (q.trySharedPush(task)) {
+ signalWork();
+ return;
+ }
+ else if (m > 1) { // move to a different index
+ r ^= r << 13; // same xorshift as WorkQueues
+ r ^= r >>> 17;
+ s.seed = r ^= r << 5;
+ }
+ else
+ Thread.yield(); // yield if no alternatives
+ }
}
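+
+ // A sketch (demo method assumed, not in the original) of the index
+ // selection above: SQMASK clears bit 0, so submitters hash only to
+ // even slots, leaving odd slots for workers (registerWorker probes
+ // odd indices). E.g. with parallelism 8, submitMask == 15 and the
+ // result ranges over {0, 2, ..., 14}.
+ static int submitSlot(int r, int submitMask) {
+ return r & submitMask & SQMASK; // always even
+ }
+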
+ // Maintaining ctl counts
+
/**
- * Return true if argument represents zero active count and
- * nonzero runstate, which is the triggering condition for
- * terminating on shutdown.
+ * Increments active count; mainly called upon return from blocking.
*/
- private static boolean canTerminateOnShutdown(int c) {
- return ((c & -c) >>> 16) != 0; // i.e. least bit is nonzero runState bit
+ final void incrementActiveCount() {
+ long c;
+ do {} while (!U.compareAndSwapLong(this, CTL, c = ctl, c + AC_UNIT));
}
/**
- * Transition run state to at least the given state. Return true
- * if not already at least given state.
+ * Tries to activate or create a worker if too few are active.
*/
- private boolean transitionRunStateTo(int state) {
- for (;;) {
- int c = runControl;
- if (runStateOf(c) >= state)
- return false;
- if (casRunControl(c, runControlFor(state, activeCountOf(c))))
- return true;
+ final void signalWork() {
+ long c; int u;
+ while ((u = (int)((c = ctl) >>> 32)) < 0) { // too few active
+ WorkQueue[] ws = workQueues; int e, i; WorkQueue w; Thread p;
+ if ((e = (int)c) > 0) { // at least one waiting
+ if (ws != null && (i = e & SMASK) < ws.length &&
+ (w = ws[i]) != null && w.eventCount == (e | INT_SIGN)) {
+ long nc = (((long)(w.nextWait & E_MASK)) |
+ ((long)(u + UAC_UNIT) << 32));
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ w.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = w.parker) != null)
+ U.unpark(p); // activate and release
+ break;
+ }
+ }
+ else
+ break;
+ }
+ else if (e == 0 && (u & SHORT_SIGN) != 0) { // too few total
+ long nc = (long)(((u + UTC_UNIT) & UTC_MASK) |
+ ((u + UAC_UNIT) & UAC_MASK)) << 32;
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ addWorker();
+ break;
+ }
+ }
+ else
+ break;
}
}
- /**
- * Controls whether to add spares to maintain parallelism
- */
- private volatile boolean maintainsParallelism;
- // Constructors
+ // Scanning for tasks
/**
- * Creates a ForkJoinPool with a pool size equal to the number of
- * processors available on the system and using the default
- * ForkJoinWorkerThreadFactory,
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * Top-level runloop for workers, called by ForkJoinWorkerThread.run.
*/
- public ForkJoinPool() {
- this(Runtime.getRuntime().availableProcessors(),
- defaultForkJoinWorkerThreadFactory);
+ final void runWorker(WorkQueue w) {
+ w.growArray(false); // initialize queue array in this thread
+ do {} while (w.runTask(scan(w)));
}
/**
- * Creates a ForkJoinPool with the indicated parellelism level
- * threads, and using the default ForkJoinWorkerThreadFactory,
- * @param parallelism the number of worker threads
- * @throws IllegalArgumentException if parallelism less than or
- * equal to zero
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
- */
- public ForkJoinPool(int parallelism) {
- this(parallelism, defaultForkJoinWorkerThreadFactory);
+ * Scans for and, if found, returns one task, else possibly
+ * inactivates the worker. This method operates on single reads of
+ * volatile state and is designed to be re-invoked continuously,
+ * in part because it returns upon detecting inconsistencies,
+ * contention, or state changes that indicate possible success on
+ * re-invocation.
+ *
+ * The scan searches for tasks across a random permutation of
+ * queues (starting at a random index and stepping by a random
+ * relative prime, checking each at least once). The scan
+ * terminates upon either finding a non-empty queue, or completing
+ * the sweep. If the worker is not inactivated, it takes and
+ * returns a task from this queue. On failure to find a task, we
+ * take one of the following actions, after which the caller will
+ * retry calling this method unless terminated.
+ *
+ * * If pool is terminating, terminate the worker.
+ *
+ * * If not a complete sweep, try to release a waiting worker. If
+ * the scan terminated because the worker is inactivated, then the
+ * released worker will often be the calling worker, and it can
+ * succeed obtaining a task on the next call. Or maybe it is
+ * another worker, but with same net effect. Releasing in other
+ * cases as well ensures that we have enough workers running.
+ *
+ * * If not already enqueued, try to inactivate and enqueue the
+ * worker on wait queue. Or, if inactivating has caused the pool
+ * to be quiescent, relay to idleAwaitWork to check for
+ * termination and possibly shrink pool.
+ *
+ * * If already inactive, and the caller has run a task since the
+ * last empty scan, return (to allow rescan) unless others are
+ * also inactivated. Field WorkQueue.rescans counts down on each
+ * scan to ensure eventual inactivation and blocking.
+ *
+ * * If already enqueued and none of the above apply, park
+ * awaiting signal.
+ *
+ * @param w the worker (via its WorkQueue)
+ * @return a task, or null if none found
+ */
+ private final ForkJoinTask<?> scan(WorkQueue w) {
+ WorkQueue[] ws; // first update random seed
+ int r = w.seed; r ^= r << 13; r ^= r >>> 17; w.seed = r ^= r << 5;
+ int rs = runState, m; // volatile read order matters
+ if ((ws = workQueues) != null && (m = ws.length - 1) > 0) {
+ int ec = w.eventCount; // ec is negative if inactive
+ int step = (r >>> 16) | 1; // relative prime
+ for (int j = (m + 1) << 2; ; r += step) {
+ WorkQueue q; ForkJoinTask<?> t; ForkJoinTask<?>[] a; int b;
+ if ((q = ws[r & m]) != null && (b = q.base) - q.top < 0 &&
+ (a = q.array) != null) { // probably nonempty
+ int i = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ t = (ForkJoinTask<?>)U.getObjectVolatile(a, i);
+ if (q.base == b && ec >= 0 && t != null &&
+ U.compareAndSwapObject(a, i, t, null)) {
+ q.base = b + 1; // specialization of pollAt
+ return t;
+ }
+ else if ((t != null || b + 1 != q.top) &&
+ (ec < 0 || j <= m)) {
+ rs = 0; // mark scan as incomplete
+ break; // caller can retry after release
+ }
+ }
+ if (--j < 0)
+ break;
+ }
+ long c = ctl; int e = (int)c, a = (int)(c >> AC_SHIFT), nr, ns;
+ if (e < 0) // decode ctl on empty scan
+ w.runState = -1; // pool is terminating
+ else if (rs == 0 || rs != runState) { // incomplete scan
+ WorkQueue v; Thread p; // try to release a waiter
+ if (e > 0 && a < 0 && w.eventCount == ec &&
+ (v = ws[e & m]) != null && v.eventCount == (e | INT_SIGN)) {
+ long nc = ((long)(v.nextWait & E_MASK) |
+ ((c + AC_UNIT) & (AC_MASK|TC_MASK)));
+ if (ctl == c && U.compareAndSwapLong(this, CTL, c, nc)) {
+ v.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = v.parker) != null)
+ U.unpark(p);
+ }
+ }
+ }
+ else if (ec >= 0) { // try to enqueue/inactivate
+ long nc = (long)ec | ((c - AC_UNIT) & (AC_MASK|TC_MASK));
+ w.nextWait = e;
+ w.eventCount = ec | INT_SIGN; // mark as inactive
+ if (ctl != c || !U.compareAndSwapLong(this, CTL, c, nc))
+ w.eventCount = ec; // unmark on CAS failure
+ else {
+ if ((ns = w.nsteals) != 0) {
+ w.nsteals = 0; // set rescans if ran task
+ w.rescans = (a > 0) ? 0 : a + parallelism;
+ w.totalSteals += ns;
+ }
+ if (a == 1 - parallelism) // quiescent
+ idleAwaitWork(w, nc, c);
+ }
+ }
+ else if (w.eventCount < 0) { // already queued
+ if ((nr = w.rescans) > 0) { // continue rescanning
+ int ac = a + parallelism;
+ if (((w.rescans = (ac < nr) ? ac : nr - 1) & 3) == 0)
+ Thread.yield(); // yield before block
+ }
+ else {
+ Thread.interrupted(); // clear status
+ Thread wt = Thread.currentThread();
+ U.putObject(wt, PARKBLOCKER, this);
+ w.parker = wt; // emulate LockSupport.park
+ if (w.eventCount < 0) // recheck
+ U.park(false, 0L);
+ w.parker = null;
+ U.putObject(wt, PARKBLOCKER, null);
+ }
+ }
+ }
+ return null;
}
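+
+ // A sketch (demo method assumed, not in the original) of why the
+ // scan's step visits every queue: step = (r >>> 16) | 1 is odd,
+ // hence relatively prime to the power-of-two table length, so
+ // r, r + step, r + 2*step, ... covers all slots before repeating.
+ static boolean coversAllSlots(int r, int step, int m) { // m = 2^k - 1
+ boolean[] seen = new boolean[m + 1];
+ for (int j = 0; j <= m; ++j, r += step)
+ seen[r & m] = true;
+ for (boolean b : seen)
+ if (!b) return false;
+ return true; // true whenever step is odd
+ }
+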
/**
- * Creates a ForkJoinPool with parallelism equal to the number of
- * processors available on the system and using the given
- * ForkJoinWorkerThreadFactory,
- * @param factory the factory for creating new threads
- * @throws NullPointerException if factory is null
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
- */
- public ForkJoinPool(ForkJoinWorkerThreadFactory factory) {
- this(Runtime.getRuntime().availableProcessors(), factory);
+ * If inactivating worker w has caused the pool to become
+ * quiescent, checks for pool termination, and, so long as this is
+ * not the only worker, waits for an event for up to SHRINK_RATE
+ * nanosecs. On timeout, if ctl has not changed, terminates the
+ * worker, which will in turn wake up another worker to possibly
+ * repeat this process.
+ *
+ * @param w the calling worker
+ * @param currentCtl the ctl value triggering possible quiescence
+ * @param prevCtl the ctl value to restore if thread is terminated
+ */
+ private void idleAwaitWork(WorkQueue w, long currentCtl, long prevCtl) {
+ if (w.eventCount < 0 && !tryTerminate(false, false) &&
+ (int)prevCtl != 0 && ctl == currentCtl) {
+ Thread wt = Thread.currentThread();
+ Thread.yield(); // yield before block
+ while (ctl == currentCtl) {
+ long startTime = System.nanoTime();
+ Thread.interrupted(); // timed variant of version in scan()
+ U.putObject(wt, PARKBLOCKER, this);
+ w.parker = wt;
+ if (ctl == currentCtl)
+ U.park(false, SHRINK_RATE);
+ w.parker = null;
+ U.putObject(wt, PARKBLOCKER, null);
+ if (ctl != currentCtl)
+ break;
+ if (System.nanoTime() - startTime >= SHRINK_TIMEOUT &&
+ U.compareAndSwapLong(this, CTL, currentCtl, prevCtl)) {
+ w.eventCount = (w.eventCount + E_SEQ) | E_MASK;
+ w.runState = -1; // shrink
+ break;
+ }
+ }
+ }
}
/**
- * Creates a ForkJoinPool with the given parallelism and factory.
+ * Tries to locate and execute tasks for a stealer of the given
+ * task, or in turn one of its stealers. Traces currentSteal ->
+ * currentJoin links looking for a thread working on a descendant
+ * of the given task and with a non-empty queue to steal back and
+ * execute tasks from. The first call to this method upon a
+ * waiting join will often entail scanning/search (which is OK
+ * because the joiner has nothing better to do), but this method
+ * leaves hints in workers to speed up subsequent calls. The
+ * implementation is very branchy to cope with potential
+ * inconsistencies or loops encountering chains that are stale,
+ * unknown, or so long that they are likely cyclic. All of these
+ * cases are dealt with simply by having the caller retry.
*
- * @param parallelism the targeted number of worker threads
- * @param factory the factory for creating new threads
- * @throws IllegalArgumentException if parallelism less than or
- * equal to zero, or greater than implementation limit.
- * @throws NullPointerException if factory is null
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
- */
- public ForkJoinPool(int parallelism, ForkJoinWorkerThreadFactory factory) {
- if (parallelism <= 0 || parallelism > MAX_THREADS)
- throw new IllegalArgumentException();
- if (factory == null)
- throw new NullPointerException();
- checkPermission();
- this.factory = factory;
- this.parallelism = parallelism;
- this.maxPoolSize = MAX_THREADS;
- this.maintainsParallelism = true;
- this.poolNumber = poolNumberGenerator.incrementAndGet();
- this.workerLock = new ReentrantLock();
- this.termination = workerLock.newCondition();
- this.stealCount = new AtomicLong();
- this.submissionQueue = new LinkedTransferQueue<ForkJoinTask<?>>();
- // worker array and workers are lazily constructed
- }
+ * @param joiner the joining worker
+ * @param task the task to join
+ * @return true if found or ran a task (and so is immediately retryable)
+ */
+ private boolean tryHelpStealer(WorkQueue joiner, ForkJoinTask<?> task) {
+ WorkQueue[] ws;
+ int m, depth = MAX_HELP; // remaining chain depth
+ boolean progress = false;
+ if ((ws = workQueues) != null && (m = ws.length - 1) > 0 &&
+ task.status >= 0) {
+ ForkJoinTask<?> subtask = task; // current target
+ outer: for (WorkQueue j = joiner;;) {
+ WorkQueue stealer = null; // find stealer of subtask
+ WorkQueue v = ws[j.stealHint & m]; // try hint
+ if (v != null && v.currentSteal == subtask)
+ stealer = v;
+ else { // scan
+ for (int i = 1; i <= m; i += 2) {
+ if ((v = ws[i]) != null && v.currentSteal == subtask &&
+ v != joiner) {
+ stealer = v;
+ j.stealHint = i; // save hint
+ break;
+ }
+ }
+ if (stealer == null)
+ break;
+ }
- /**
- * Create new worker using factory.
- * @param index the index to assign worker
- * @return new worker, or null of factory failed
- */
- private ForkJoinWorkerThread createWorker(int index) {
- Thread.UncaughtExceptionHandler h = ueh;
- ForkJoinWorkerThread w = factory.newThread(this);
- if (w != null) {
- w.poolIndex = index;
- w.setDaemon(true);
- w.setAsyncMode(locallyFifo);
- w.setName("ForkJoinPool-" + poolNumber + "-worker-" + index);
- if (h != null)
- w.setUncaughtExceptionHandler(h);
+ for (WorkQueue q = stealer;;) { // try to help stealer
+ ForkJoinTask[] a; ForkJoinTask<?> t; int b;
+ if (task.status < 0)
+ break outer;
+ if ((b = q.base) - q.top < 0 && (a = q.array) != null) {
+ progress = true;
+ int i = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ t = (ForkJoinTask<?>)U.getObjectVolatile(a, i);
+ if (subtask.status < 0) // must recheck before taking
+ break outer;
+ if (t != null &&
+ q.base == b &&
+ U.compareAndSwapObject(a, i, t, null)) {
+ q.base = b + 1;
+ joiner.runSubtask(t);
+ }
+ else if (q.base == b)
+ break outer; // possibly stalled
+ }
+ else { // descend
+ ForkJoinTask<?> next = stealer.currentJoin;
+ if (--depth <= 0 || subtask.status < 0 ||
+ next == null || next == subtask)
+ break outer; // stale, dead-end, or cyclic
+ subtask = next;
+ j = stealer;
+ break;
+ }
+ }
+ }
}
- return w;
+ return progress;
}
/**
- * Return a good size for worker array given pool size.
- * Currently requires size to be a power of two.
+ * If task is at base of some steal queue, steals and executes it.
+ *
+ * @param joiner the joining worker
+ * @param task the task
*/
- private static int arraySizeFor(int ps) {
- return ps <= 1? 1 : (1 << (32 - Integer.numberOfLeadingZeros(ps-1)));
+ private void tryPollForAndExec(WorkQueue joiner, ForkJoinTask<?> task) {
+ WorkQueue[] ws;
+ if ((ws = workQueues) != null) {
+ for (int j = 1; j < ws.length && task.status >= 0; j += 2) {
+ WorkQueue q = ws[j];
+ if (q != null && q.pollFor(task)) {
+ joiner.runSubtask(task);
+ break;
+ }
+ }
+ }
}
- public static ForkJoinWorkerThread[] copyOfWorkers(ForkJoinWorkerThread[] original, int newLength) {
- ForkJoinWorkerThread[] copy = new ForkJoinWorkerThread[newLength];
- System.arraycopy(original, 0, copy, 0, Math.min(newLength, original.length));
- return copy;
+ /**
+ * Tries to decrement active count (sometimes implicitly) and
+ * possibly release or create a compensating worker in preparation
+ * for blocking. Fails on contention or termination. Otherwise,
+ * adds a new thread if no idle workers are available and either
+ * the pool would become completely starved, or it is at least half
+ * starved, fewer than 50% spares exist, and there is at least
+ * one task apparently available. Even though the availability
+ * check requires a full scan, it is worthwhile in reducing false
+ * alarms.
+ *
+ * @param task if non-null, a task being waited for
+ * @param blocker if non-null, a blocker being waited for
+ * @return true if the caller can block, else should recheck and retry
+ */
+ final boolean tryCompensate(ForkJoinTask<?> task, ManagedBlocker blocker) {
+ int pc = parallelism, e;
+ long c = ctl;
+ WorkQueue[] ws = workQueues;
+ if ((e = (int)c) >= 0 && ws != null) {
+ int u, a, ac, hc;
+ int tc = (short)((u = (int)(c >>> 32)) >>> UTC_SHIFT) + pc;
+ boolean replace = false;
+ if ((a = u >> UAC_SHIFT) <= 0) {
+ if ((ac = a + pc) <= 1)
+ replace = true;
+ else if ((e > 0 || (task != null &&
+ ac <= (hc = pc >>> 1) && tc < pc + hc))) {
+ WorkQueue w;
+ for (int j = 0; j < ws.length; ++j) {
+ if ((w = ws[j]) != null && !w.isEmpty()) {
+ replace = true;
+ break; // in compensation range and tasks available
+ }
+ }
+ }
+ }
+ if ((task == null || task.status >= 0) && // recheck need to block
+ (blocker == null || !blocker.isReleasable()) && ctl == c) {
+ if (!replace) { // no compensation
+ long nc = ((c - AC_UNIT) & AC_MASK) | (c & ~AC_MASK);
+ if (U.compareAndSwapLong(this, CTL, c, nc))
+ return true;
+ }
+ else if (e != 0) { // release an idle worker
+ WorkQueue w; Thread p; int i;
+ if ((i = e & SMASK) < ws.length && (w = ws[i]) != null) {
+ long nc = ((long)(w.nextWait & E_MASK) |
+ (c & (AC_MASK|TC_MASK)));
+ if (w.eventCount == (e | INT_SIGN) &&
+ U.compareAndSwapLong(this, CTL, c, nc)) {
+ w.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = w.parker) != null)
+ U.unpark(p);
+ return true;
+ }
+ }
+ }
+ else if (tc < MAX_CAP) { // create replacement
+ long nc = ((c + TC_UNIT) & TC_MASK) | (c & ~TC_MASK);
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ addWorker();
+ return true;
+ }
+ }
+ }
+ }
+ return false;
}
/**
- * Create or resize array if necessary to hold newLength.
- * Call only under exlusion or lock
- * @return the array
- */
- private ForkJoinWorkerThread[] ensureWorkerArrayCapacity(int newLength) {
- ForkJoinWorkerThread[] ws = workers;
- if (ws == null)
- return workers = new ForkJoinWorkerThread[arraySizeFor(newLength)];
- else if (newLength > ws.length)
- return workers = copyOfWorkers(ws, arraySizeFor(newLength));
- else
- return ws;
+ * Helps and/or blocks until the given task is done.
+ *
+ * @param joiner the joining worker
+ * @param task the task
+ * @return task status on exit
+ */
+ final int awaitJoin(WorkQueue joiner, ForkJoinTask<?> task) {
+ ForkJoinTask<?> prevJoin = joiner.currentJoin;
+ joiner.currentJoin = task;
+ long startTime = 0L;
+ for (int k = 0, s; ; ++k) {
+ if ((joiner.isEmpty() ? // try to help
+ !tryHelpStealer(joiner, task) :
+ !joiner.tryRemoveAndExec(task))) {
+ if (k == 0) {
+ startTime = System.nanoTime();
+ tryPollForAndExec(joiner, task); // check uncommon case
+ }
+ else if ((k & (MAX_HELP - 1)) == 0 &&
+ System.nanoTime() - startTime >= COMPENSATION_DELAY &&
+ tryCompensate(task, null)) {
+ if (task.trySetSignal() && task.status >= 0) {
+ synchronized (task) {
+ if (task.status >= 0) {
+ try { // see ForkJoinTask
+ task.wait(); // for explanation
+ } catch (InterruptedException ie) {
+ }
+ }
+ else
+ task.notifyAll();
+ }
+ }
+ long c; // re-activate
+ do {} while (!U.compareAndSwapLong
+ (this, CTL, c = ctl, c + AC_UNIT));
+ }
+ }
+ if ((s = task.status) < 0) {
+ joiner.currentJoin = prevJoin;
+ return s;
+ }
+ else if ((k & (MAX_HELP - 1)) == MAX_HELP >>> 1)
+ Thread.yield(); // for politeness
+ }
}
/**
- * Try to shrink workers into smaller array after one or more terminate
+ * Stripped-down variant of awaitJoin used by timed joins. Tries
+ * to help join only while there is continuous progress. (Caller
+ * will then enter a timed wait.)
+ *
+ * @param joiner the joining worker
+ * @param task the task
+ * @return task status on exit
*/
- private void tryShrinkWorkerArray() {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- int len = ws.length;
- int last = len - 1;
- while (last >= 0 && ws[last] == null)
- --last;
- int newLength = arraySizeFor(last+1);
- if (newLength < len)
- workers = copyOfWorkers(ws, newLength);
+ final int helpJoinOnce(WorkQueue joiner, ForkJoinTask<?> task) {
+ int s;
+ while ((s = task.status) >= 0 &&
+ (joiner.isEmpty() ?
+ tryHelpStealer(joiner, task) :
+ joiner.tryRemoveAndExec(task)))
+ ;
+ return s;
+ }
+
+ /**
+ * Returns a (probably) non-empty steal queue, if one is found
+ * during a random, then cyclic scan, else null. This method must
+ * be retried by the caller if, by the time it tries to use the queue,
+ * it is empty.
+ */
+ private WorkQueue findNonEmptyStealQueue(WorkQueue w) {
+ // Similar to loop in scan(), but ignoring submissions
+ int r = w.seed; r ^= r << 13; r ^= r >>> 17; w.seed = r ^= r << 5;
+ int step = (r >>> 16) | 1;
+ for (WorkQueue[] ws;;) {
+ int rs = runState, m;
+ if ((ws = workQueues) == null || (m = ws.length - 1) < 1)
+ return null;
+ for (int j = (m + 1) << 2; ; r += step) {
+ WorkQueue q = ws[((r << 1) | 1) & m];
+ if (q != null && !q.isEmpty())
+ return q;
+ else if (--j < 0) {
+ if (runState == rs)
+ return null;
+ break;
+ }
+ }
}
}
/**
- * Initialize workers if necessary
- */
- final void ensureWorkerInitialization() {
- ForkJoinWorkerThread[] ws = workers;
- if (ws == null) {
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ws = workers;
- if (ws == null) {
- int ps = parallelism;
- ws = ensureWorkerArrayCapacity(ps);
- for (int i = 0; i < ps; ++i) {
- ForkJoinWorkerThread w = createWorker(i);
- if (w != null) {
- ws[i] = w;
- w.start();
- updateWorkerCount(1);
- }
- }
+ * Runs tasks until {@code isQuiescent()}. We piggyback on
+ * active count ctl maintenance, but rather than blocking
+ * when tasks cannot be found, we rescan until all others cannot
+ * find tasks either.
+ */
+ final void helpQuiescePool(WorkQueue w) {
+ for (boolean active = true;;) {
+ if (w.base - w.top < 0)
+ w.runLocalTasks(); // exhaust local queue
+ WorkQueue q = findNonEmptyStealQueue(w);
+ if (q != null) {
+ ForkJoinTask<?> t; int b;
+ if (!active) { // re-establish active count
+ long c;
+ active = true;
+ do {} while (!U.compareAndSwapLong
+ (this, CTL, c = ctl, c + AC_UNIT));
+ }
+ if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null)
+ w.runSubtask(t);
+ }
+ else {
+ long c;
+ if (active) { // decrement active count without queuing
+ active = false;
+ do {} while (!U.compareAndSwapLong
+ (this, CTL, c = ctl, c -= AC_UNIT));
+ }
+ else
+ c = ctl; // re-increment on exit
+ if ((int)(c >> AC_SHIFT) + parallelism == 0) {
+ do {} while (!U.compareAndSwapLong
+ (this, CTL, c = ctl, c + AC_UNIT));
+ break;
}
- } finally {
- lock.unlock();
}
}
}
/**
- * Worker creation and startup for threads added via setParallelism.
+ * Gets and removes a local or stolen task for the given worker.
+ *
+ * @return a task, if available
+ */
+ final ForkJoinTask<?> nextTaskFor(WorkQueue w) {
+ for (ForkJoinTask<?> t;;) {
+ WorkQueue q; int b;
+ if ((t = w.nextLocalTask()) != null)
+ return t;
+ if ((q = findNonEmptyStealQueue(w)) == null)
+ return null;
+ if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null)
+ return t;
+ }
+ }
+
+ /**
+ * Returns the approximate (non-atomic) number of idle threads per
+ * active thread to offset steal queue size for method
+ * ForkJoinTask.getSurplusQueuedTaskCount().
*/
- private void createAndStartAddedWorkers() {
- resumeAllSpares(); // Allow spares to convert to nonspare
- int ps = parallelism;
- ForkJoinWorkerThread[] ws = ensureWorkerArrayCapacity(ps);
- int len = ws.length;
- // Sweep through slots, to keep lowest indices most populated
- int k = 0;
- while (k < len) {
- if (ws[k] != null) {
- ++k;
- continue;
+ final int idlePerActive() {
+ // Approximate at powers of two for small values, saturate past 4
+ int p = parallelism;
+ int a = p + (int)(ctl >> AC_SHIFT);
+ return (a > (p >>>= 1) ? 0 :
+ a > (p >>>= 1) ? 1 :
+ a > (p >>>= 1) ? 2 :
+ a > (p >>>= 1) ? 4 :
+ 8);
+ }
+
+ // Termination
+
+ /**
+ * Possibly initiates and/or completes termination. The caller
+ * triggering termination runs three passes through workQueues:
+ * (0) Setting termination status, followed by wakeups of queued
+ * workers; (1) cancelling all tasks; (2) interrupting lagging
+ * threads (likely in external tasks, but possibly also blocked in
+ * joins). Each pass repeats previous steps because of potential
+ * lagging thread creation.
+ *
+ * @param now if true, unconditionally terminate, else only
+ * if no work and no active workers
+ * @param enable if true, enable shutdown when next possible
+ * @return true if now terminating or terminated
+ */
+ private boolean tryTerminate(boolean now, boolean enable) {
+ Mutex lock = this.lock;
+ for (long c;;) {
+ if (((c = ctl) & STOP_BIT) != 0) { // already terminating
+ if ((short)(c >>> TC_SHIFT) == -parallelism) {
+ lock.lock(); // don't need try/finally
+ termination.signalAll(); // signal when 0 workers
+ lock.unlock();
+ }
+ return true;
}
- int s = workerCounts;
- int tc = totalCountOf(s);
- int rc = runningCountOf(s);
- if (rc >= ps || tc >= ps)
- break;
- if (casWorkerCounts (s, workerCountsFor(tc+1, rc+1))) {
- ForkJoinWorkerThread w = createWorker(k);
- if (w != null) {
- ws[k++] = w;
- w.start();
+ if (runState >= 0) { // not yet enabled
+ if (!enable)
+ return false;
+ lock.lock();
+ runState |= SHUTDOWN;
+ lock.unlock();
+ }
+ if (!now) { // check if idle & no tasks
+ if ((int)(c >> AC_SHIFT) != -parallelism ||
+ hasQueuedSubmissions())
+ return false;
+ // Check for unqueued inactive workers. One pass suffices.
+ WorkQueue[] ws = workQueues; WorkQueue w;
+ if (ws != null) {
+ for (int i = 1; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null && w.eventCount >= 0)
+ return false;
+ }
}
- else {
- updateWorkerCount(-1); // back out on failed creation
- break;
+ }
+ if (U.compareAndSwapLong(this, CTL, c, c | STOP_BIT)) {
+ for (int pass = 0; pass < 3; ++pass) {
+ WorkQueue[] ws = workQueues;
+ if (ws != null) {
+ WorkQueue w;
+ int n = ws.length;
+ for (int i = 0; i < n; ++i) {
+ if ((w = ws[i]) != null) {
+ w.runState = -1;
+ if (pass > 0) {
+ w.cancelAll();
+ if (pass > 1)
+ w.interruptOwner();
+ }
+ }
+ }
+ // Wake up workers parked on event queue
+ int i, e; long cc; Thread p;
+ while ((e = (int)(cc = ctl) & E_MASK) != 0 &&
+ (i = e & SMASK) < n &&
+ (w = ws[i]) != null) {
+ long nc = ((long)(w.nextWait & E_MASK) |
+ ((cc + AC_UNIT) & AC_MASK) |
+ (cc & (TC_MASK|STOP_BIT)));
+ if (w.eventCount == (e | INT_SIGN) &&
+ U.compareAndSwapLong(this, CTL, cc, nc)) {
+ w.eventCount = (e + E_SEQ) & E_MASK;
+ w.runState = -1;
+ if ((p = w.parker) != null)
+ U.unpark(p);
+ }
+ }
+ }
}
}
}
}
- // Execution methods
+ // Exported methods
+
+ // Constructors
+
+ /**
+ * Creates a {@code ForkJoinPool} with parallelism equal to {@link
+ * java.lang.Runtime#availableProcessors}, using the {@linkplain
+ * #defaultForkJoinWorkerThreadFactory default thread factory},
+ * no UncaughtExceptionHandler, and non-async LIFO processing mode.
+ *
+ * @throws SecurityException if a security manager exists and
+ * the caller is not permitted to modify threads
+ * because it does not hold {@link
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
+ */
+ public ForkJoinPool() {
+ this(Runtime.getRuntime().availableProcessors(),
+ defaultForkJoinWorkerThreadFactory, null, false);
+ }
+
+ /**
+ * Creates a {@code ForkJoinPool} with the indicated parallelism
+ * level, the {@linkplain
+ * #defaultForkJoinWorkerThreadFactory default thread factory},
+ * no UncaughtExceptionHandler, and non-async LIFO processing mode.
+ *
+ * @param parallelism the parallelism level
+ * @throws IllegalArgumentException if parallelism less than or
+ * equal to zero, or greater than implementation limit
+ * @throws SecurityException if a security manager exists and
+ * the caller is not permitted to modify threads
+ * because it does not hold {@link
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
+ */
+ public ForkJoinPool(int parallelism) {
+ this(parallelism, defaultForkJoinWorkerThreadFactory, null, false);
+ }
/**
- * Common code for execute, invoke and submit
+ * Creates a {@code ForkJoinPool} with the given parameters.
+ *
+ * @param parallelism the parallelism level. For default value,
+ * use {@link java.lang.Runtime#availableProcessors}.
+ * @param factory the factory for creating new threads. For default value,
+ * use {@link #defaultForkJoinWorkerThreadFactory}.
+ * @param handler the handler for internal worker threads that
+ * terminate due to unrecoverable errors encountered while executing
+ * tasks. For default value, use {@code null}.
+ * @param asyncMode if true,
+ * establishes local first-in-first-out scheduling mode for forked
+ * tasks that are never joined. This mode may be more appropriate
+ * than default locally stack-based mode in applications in which
+ * worker threads only process event-style asynchronous tasks.
+ * For default value, use {@code false}.
+ * @throws IllegalArgumentException if parallelism less than or
+ * equal to zero, or greater than implementation limit
+ * @throws NullPointerException if the factory is null
+ * @throws SecurityException if a security manager exists and
+ * the caller is not permitted to modify threads
+ * because it does not hold {@link
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
*/
- private <T> void doSubmit(ForkJoinTask<T> task) {
- if (isShutdown())
- throw new RejectedExecutionException();
- if (workers == null)
- ensureWorkerInitialization();
- submissionQueue.offer(task);
- signalIdleWorkers();
+ public ForkJoinPool(int parallelism,
+ ForkJoinWorkerThreadFactory factory,
+ Thread.UncaughtExceptionHandler handler,
+ boolean asyncMode) {
+ checkPermission();
+ if (factory == null)
+ throw new NullPointerException();
+ if (parallelism <= 0 || parallelism > MAX_CAP)
+ throw new IllegalArgumentException();
+ this.parallelism = parallelism;
+ this.factory = factory;
+ this.ueh = handler;
+ this.localMode = asyncMode ? FIFO_QUEUE : LIFO_QUEUE;
+ long np = (long)(-parallelism); // offset ctl counts
+ this.ctl = ((np << AC_SHIFT) & AC_MASK) | ((np << TC_SHIFT) & TC_MASK);
+ // Use nearest power of 2 for workQueues size. See Hacker's Delight, sec. 3.2.
+ int n = parallelism - 1;
+ n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16;
+ int size = (n + 1) << 1; // #slots = 2*#workers
+ this.submitMask = size - 1; // room for max # of submit queues
+ this.workQueues = new WorkQueue[size];
+ this.termination = (this.lock = new Mutex()).newCondition();
+ this.stealCount = new AtomicLong();
+ this.nextWorkerNumber = new AtomicInteger();
+ int pn = poolNumberGenerator.incrementAndGet();
+ StringBuilder sb = new StringBuilder("ForkJoinPool-");
+ sb.append(Integer.toString(pn));
+ sb.append("-worker-");
+ this.workerNamePrefix = sb.toString();
+ lock.lock();
+ this.runState = 1; // set init flag
+ lock.unlock();
}
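+
+ // A sketch (helper name assumed, not in the original) of the
+ // bit-smearing round-up used above: OR-ing the high bit of
+ // parallelism - 1 into every lower position, then adding 1, yields
+ // the smallest power of two >= parallelism; doubling it leaves even
+ // slots for submission queues and odd slots for workers.
+ static int workQueueArraySize(int parallelism) {
+ int n = parallelism - 1;
+ n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16;
+ return (n + 1) << 1; // e.g. parallelism 6 -> 16 slots
+ }
+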
+ // Execution methods
+
/**
- * Performs the given task; returning its result upon completion
+ * Performs the given task, returning its result upon completion.
+ * If the computation encounters an unchecked Exception or Error,
+ * it is rethrown as the outcome of this invocation. Rethrown
+ * exceptions behave in the same way as regular exceptions, but,
+ * when possible, contain stack traces (as displayed for example
+ * using {@code ex.printStackTrace()}) of both the current thread
+ * as well as the thread actually encountering the exception;
+ * minimally only the latter.
+ *
* @param task the task
* @return the task's result
- * @throws NullPointerException if task is null
- * @throws RejectedExecutionException if pool is shut down
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
*/
public <T> T invoke(ForkJoinTask<T> task) {
+ if (task == null)
+ throw new NullPointerException();
doSubmit(task);
return task.join();
}
/**
* Arranges for (asynchronous) execution of the given task.
+ *
* @param task the task
- * @throws NullPointerException if task is null
- * @throws RejectedExecutionException if pool is shut down
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
*/
- public <T> void execute(ForkJoinTask<T> task) {
+ public void execute(ForkJoinTask<?> task) {
+ if (task == null)
+ throw new NullPointerException();
doSubmit(task);
}
// AbstractExecutorService methods
+ /**
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
+ */
public void execute(Runnable task) {
- doSubmit(new AdaptedRunnable<Void>(task, null));
+ if (task == null)
+ throw new NullPointerException();
+ ForkJoinTask<?> job;
+ if (task instanceof ForkJoinTask<?>) // avoid re-wrap
+ job = (ForkJoinTask<?>) task;
+ else
+ job = new ForkJoinTask.AdaptedRunnableAction(task);
+ doSubmit(job);
+ }
+
+ /**
+ * Submits a ForkJoinTask for execution.
+ *
+ * @param task the task to submit
+ * @return the task
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
+ */
+ public <T> ForkJoinTask<T> submit(ForkJoinTask<T> task) {
+ if (task == null)
+ throw new NullPointerException();
+ doSubmit(task);
+ return task;
}
+ /**
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
+ */
public <T> ForkJoinTask<T> submit(Callable<T> task) {
- ForkJoinTask<T> job = new AdaptedCallable<T>(task);
+ ForkJoinTask<T> job = new ForkJoinTask.AdaptedCallable<T>(task);
doSubmit(job);
return job;
}
+ /**
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
+ */
public <T> ForkJoinTask<T> submit(Runnable task, T result) {
- ForkJoinTask<T> job = new AdaptedRunnable<T>(task, result);
+ ForkJoinTask<T> job = new ForkJoinTask.AdaptedRunnable<T>(task, result);
doSubmit(job);
return job;
}
+ /**
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
+ */
public ForkJoinTask<?> submit(Runnable task) {
- ForkJoinTask<Void> job = new AdaptedRunnable<Void>(task, null);
+ if (task == null)
+ throw new NullPointerException();
+ ForkJoinTask<?> job;
+ if (task instanceof ForkJoinTask<?>) // avoid re-wrap
+ job = (ForkJoinTask<?>) task;
+ else
+ job = new ForkJoinTask.AdaptedRunnableAction(task);
doSubmit(job);
return job;
}
/**
- * Adaptor for Runnables. This implements RunnableFuture
- * to be compliant with AbstractExecutorService constraints
+ * @throws NullPointerException {@inheritDoc}
+ * @throws RejectedExecutionException {@inheritDoc}
*/
- static final class AdaptedRunnable<T> extends ForkJoinTask<T>
- implements RunnableFuture<T> {
- final Runnable runnable;
- final T resultOnCompletion;
- T result;
- AdaptedRunnable(Runnable runnable, T result) {
- if (runnable == null) throw new NullPointerException();
- this.runnable = runnable;
- this.resultOnCompletion = result;
- }
- public T getRawResult() { return result; }
- public void setRawResult(T v) { result = v; }
- public boolean exec() {
- runnable.run();
- result = resultOnCompletion;
- return true;
- }
- public void run() { invoke(); }
- }
-
- /**
- * Adaptor for Callables
- */
- static final class AdaptedCallable<T> extends ForkJoinTask<T>
- implements RunnableFuture<T> {
- final Callable<T> callable;
- T result;
- AdaptedCallable(Callable<T> callable) {
- if (callable == null) throw new NullPointerException();
- this.callable = callable;
- }
- public T getRawResult() { return result; }
- public void setRawResult(T v) { result = v; }
- public boolean exec() {
- try {
- result = callable.call();
- return true;
- } catch (Error err) {
- throw err;
- } catch (RuntimeException rex) {
- throw rex;
- } catch (Exception ex) {
- throw new RuntimeException(ex);
- }
- }
- public void run() { invoke(); }
- }
-
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) {
- ArrayList<ForkJoinTask<T>> ts =
- new ArrayList<ForkJoinTask<T>>(tasks.size());
- for (Callable<T> c : tasks)
- ts.add(new AdaptedCallable<T>(c));
- invoke(new InvokeAll<T>(ts));
- return (List<Future<T>>)(List)ts;
- }
-
- static final class InvokeAll<T> extends RecursiveAction {
- final ArrayList<ForkJoinTask<T>> tasks;
- InvokeAll(ArrayList<ForkJoinTask<T>> tasks) { this.tasks = tasks; }
- public void compute() {
- try { invokeAll(tasks); } catch(Exception ignore) {}
+ // In previous versions of this class, this method constructed
+ // a task to run ForkJoinTask.invokeAll, but now external
+ // invocation of multiple tasks is at least as efficient.
+ List<ForkJoinTask<T>> fs = new ArrayList<ForkJoinTask<T>>(tasks.size());
+ // Workaround needed because method wasn't declared with
+ // wildcards in return type but should have been.
+ @SuppressWarnings({"unchecked", "rawtypes"})
+ List<Future<T>> futures = (List<Future<T>>) (List) fs;
+
+ boolean done = false;
+ try {
+ for (Callable<T> t : tasks) {
+ ForkJoinTask<T> f = new ForkJoinTask.AdaptedCallable<T>(t);
+ doSubmit(f);
+ fs.add(f);
+ }
+ for (ForkJoinTask<T> f : fs)
+ f.quietlyJoin();
+ done = true;
+ return futures;
+ } finally {
+ if (!done)
+ for (ForkJoinTask<T> f : fs)
+ f.cancel(false);
}
}
- // Configuration and status settings and queries
-
/**
- * Returns the factory used for constructing new workers
+ * Returns the factory used for constructing new workers.
*
* @return the factory used for constructing new workers
*/
@@ -674,92 +2329,17 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
/**
* Returns the handler for internal worker threads that terminate
* due to unrecoverable errors encountered while executing tasks.
- * @return the handler, or null if none
- */
- public Thread.UncaughtExceptionHandler getUncaughtExceptionHandler() {
- Thread.UncaughtExceptionHandler h;
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- h = ueh;
- } finally {
- lock.unlock();
- }
- return h;
- }
-
- /**
- * Sets the handler for internal worker threads that terminate due
- * to unrecoverable errors encountered while executing tasks.
- * Unless set, the current default or ThreadGroup handler is used
- * as handler.
*
- * @param h the new handler
- * @return the old handler, or null if none
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * @return the handler, or {@code null} if none
*/
- public Thread.UncaughtExceptionHandler
- setUncaughtExceptionHandler(Thread.UncaughtExceptionHandler h) {
- checkPermission();
- Thread.UncaughtExceptionHandler old = null;
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- old = ueh;
- ueh = h;
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread w = ws[i];
- if (w != null)
- w.setUncaughtExceptionHandler(h);
- }
- }
- } finally {
- lock.unlock();
- }
- return old;
- }
-
-
- /**
- * Sets the target paralleism level of this pool.
- * @param parallelism the target parallelism
- * @throws IllegalArgumentException if parallelism less than or
- * equal to zero or greater than maximum size bounds.
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
- */
- public void setParallelism(int parallelism) {
- checkPermission();
- if (parallelism <= 0 || parallelism > maxPoolSize)
- throw new IllegalArgumentException();
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- if (!isTerminating()) {
- int p = this.parallelism;
- this.parallelism = parallelism;
- if (parallelism > p)
- createAndStartAddedWorkers();
- else
- trimSpares();
- }
- } finally {
- lock.unlock();
- }
- signalIdleWorkers();
+ public Thread.UncaughtExceptionHandler getUncaughtExceptionHandler() {
+ return ueh;
}
/**
- * Returns the targeted number of worker threads in this pool.
+ * Returns the targeted parallelism level of this pool.
*
- * @return the targeted number of worker threads in this pool
+ * @return the targeted parallelism level of this pool
*/
public int getParallelism() {
return parallelism;
@@ -767,141 +2347,71 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
/**
* Returns the number of worker threads that have started but not
- * yet terminated. This result returned by this method may differ
- * from <code>getParallelism</code> when threads are created to
+ * yet terminated. The result returned by this method may differ
+ * from {@link #getParallelism} when threads are created to
* maintain parallelism when others are cooperatively blocked.
*
* @return the number of worker threads
*/
public int getPoolSize() {
- return totalCountOf(workerCounts);
+ return parallelism + (short)(ctl >>> TC_SHIFT);
}
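Note: getPoolSize (and getActiveThreadCount below) decode counts packed into the long ctl word as deltas from the target parallelism. A standalone sketch of that decoding; the shift values are our assumption about this file's layout (active count in the top 16 bits, total count in the next 16) and are not shown in this hunk:

    final class CtlDecode {
        static final int AC_SHIFT = 48; // active-count field (assumed layout)
        static final int TC_SHIFT = 32; // total-count field (assumed layout)

        static int poolSize(long ctl, int parallelism) {
            // total count is stored as a delta from parallelism
            return parallelism + (short)(ctl >>> TC_SHIFT);
        }

        static int activeCount(long ctl, int parallelism) {
            int ac = parallelism + (int)(ctl >> AC_SHIFT);
            return (ac < 0) ? 0 : ac; // clamp transient negatives
        }
    }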
/**
- * Returns the maximum number of threads allowed to exist in the
- * pool, even if there are insufficient unblocked running threads.
- * @return the maximum
- */
- public int getMaximumPoolSize() {
- return maxPoolSize;
- }
-
- /**
- * Sets the maximum number of threads allowed to exist in the
- * pool, even if there are insufficient unblocked running threads.
- * Setting this value has no effect on current pool size. It
- * controls construction of new threads.
- * @throws IllegalArgumentException if negative or greater then
- * internal implementation limit.
- */
- public void setMaximumPoolSize(int newMax) {
- if (newMax < 0 || newMax > MAX_THREADS)
- throw new IllegalArgumentException();
- maxPoolSize = newMax;
- }
-
-
- /**
- * Returns true if this pool dynamically maintains its target
- * parallelism level. If false, new threads are added only to
- * avoid possible starvation.
- * This setting is by default true;
- * @return true if maintains parallelism
- */
- public boolean getMaintainsParallelism() {
- return maintainsParallelism;
- }
-
- /**
- * Sets whether this pool dynamically maintains its target
- * parallelism level. If false, new threads are added only to
- * avoid possible starvation.
- * @param enable true to maintains parallelism
- */
- public void setMaintainsParallelism(boolean enable) {
- maintainsParallelism = enable;
- }
-
- /**
- * Establishes local first-in-first-out scheduling mode for forked
- * tasks that are never joined. This mode may be more appropriate
- * than default locally stack-based mode in applications in which
- * worker threads only process asynchronous tasks. This method is
- * designed to be invoked only when pool is quiescent, and
- * typically only before any tasks are submitted. The effects of
- * invocations at ather times may be unpredictable.
- *
- * @param async if true, use locally FIFO scheduling
- * @return the previous mode.
- */
- public boolean setAsyncMode(boolean async) {
- boolean oldMode = locallyFifo;
- locallyFifo = async;
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null)
- t.setAsyncMode(async);
- }
- }
- return oldMode;
- }
-
- /**
- * Returns true if this pool uses local first-in-first-out
+ * Returns {@code true} if this pool uses local first-in-first-out
* scheduling mode for forked tasks that are never joined.
*
- * @return true if this pool uses async mode.
+ * @return {@code true} if this pool uses async mode
*/
public boolean getAsyncMode() {
- return locallyFifo;
+ return localMode != 0;
}
/**
* Returns an estimate of the number of worker threads that are
* not blocked waiting to join tasks or for other managed
- * synchronization.
+ * synchronization. This method may overestimate the
+ * number of running threads.
*
* @return the number of worker threads
*/
public int getRunningThreadCount() {
- return runningCountOf(workerCounts);
+ int rc = 0;
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 1; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null && w.isApparentlyUnblocked())
+ ++rc;
+ }
+ }
+ return rc;
}
/**
* Returns an estimate of the number of threads that are currently
* stealing or executing tasks. This method may overestimate the
* number of active threads.
- * @return the number of active threads.
+ *
+ * @return the number of active threads
*/
public int getActiveThreadCount() {
- return activeCountOf(runControl);
- }
-
- /**
- * Returns an estimate of the number of threads that are currently
- * idle waiting for tasks. This method may underestimate the
- * number of idle threads.
- * @return the number of idle threads.
- */
- final int getIdleThreadCount() {
- int c = runningCountOf(workerCounts) - activeCountOf(runControl);
- return (c <= 0)? 0 : c;
+ int r = parallelism + (int)(ctl >> AC_SHIFT);
+ return (r <= 0) ? 0 : r; // suppress momentarily negative values
}
/**
- * Returns true if all worker threads are currently idle. An idle
- * worker is one that cannot obtain a task to execute because none
- * are available to steal from other threads, and there are no
- * pending submissions to the pool. This method is conservative:
- * It might not return true immediately upon idleness of all
- * threads, but will eventually become true if threads remain
- * inactive.
- * @return true if all threads are currently idle
+ * Returns {@code true} if all worker threads are currently idle.
+ * An idle worker is one that cannot obtain a task to execute
+ * because none are available to steal from other threads, and
+ * there are no pending submissions to the pool. This method is
+ * conservative; it might not return {@code true} immediately upon
+ * idleness of all threads, but will eventually become true if
+ * threads remain inactive.
+ *
+ * @return {@code true} if all threads are currently idle
*/
public boolean isQuiescent() {
- return activeCountOf(runControl) == 0;
+ return (int)(ctl >> AC_SHIFT) + parallelism == 0;
}
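Note: isQuiescent derives idleness from the same ctl arithmetic and is documented as conservative, which suits polling-style monitoring. A small helper of ours built on it:

    import scala.concurrent.forkjoin.ForkJoinPool;

    final class Quiescence {
        // May lag actual idleness briefly, but turns true once all
        // workers stay inactive (see the javadoc above).
        static void awaitQuiescent(ForkJoinPool pool) throws InterruptedException {
            while (!pool.isQuiescent())
                Thread.sleep(10);
        }
    }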
/**
@@ -909,23 +2419,22 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* one thread's work queue by another. The reported value
* underestimates the actual total number of steals when the pool
* is not quiescent. This value may be useful for monitoring and
- * tuning fork/join programs: In general, steal counts should be
+ * tuning fork/join programs: in general, steal counts should be
* high enough to keep threads busy, but low enough to avoid
* overhead and contention across threads.
- * @return the number of steals.
+ *
+ * @return the number of steals
*/
public long getStealCount() {
- return stealCount.get();
- }
-
- /**
- * Accumulate steal count from a worker. Call only
- * when worker known to be idle.
- */
- private void updateStealCount(ForkJoinWorkerThread w) {
- int sc = w.getAndClearStealCount();
- if (sc != 0)
- stealCount.addAndGet(sc);
+ long count = stealCount.get();
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 1; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null)
+ count += w.totalSteals;
+ }
+ }
+ return count;
}
/**
@@ -935,77 +2444,106 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* an approximation, obtained by iterating across all threads in
* the pool. This method may be useful for tuning task
* granularities.
- * @return the number of queued tasks.
+ *
+ * @return the number of queued tasks
*/
public long getQueuedTaskCount() {
long count = 0;
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null)
- count += t.getQueueSize();
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 1; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null)
+ count += w.queueSize();
}
}
return count;
}
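Note: getStealCount and getQueuedTaskCount iterate the queue array and are only estimates, so they belong in monitoring code rather than control flow. A snapshot helper of ours combining the queries defined so far:

    import scala.concurrent.forkjoin.ForkJoinPool;

    final class PoolStats {
        // Steal counts are the tuning signal: high enough to keep workers
        // busy, low enough to avoid cross-thread contention.
        static String snapshot(ForkJoinPool pool) {
            return "parallelism=" + pool.getParallelism() +
                   " size=" + pool.getPoolSize() +
                   " steals=" + pool.getStealCount() +
                   " queuedTasks=" + pool.getQueuedTaskCount();
        }
    }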
/**
- * Returns an estimate of the number tasks submitted to this pool
- * that have not yet begun executing. This method takes time
- * proportional to the number of submissions.
- * @return the number of queued submissions.
+ * Returns an estimate of the number of tasks submitted to this
+ * pool that have not yet begun executing. This method may take
+ * time proportional to the number of submissions.
+ *
+ * @return the number of queued submissions
*/
public int getQueuedSubmissionCount() {
- return submissionQueue.size();
+ int count = 0;
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null)
+ count += w.queueSize();
+ }
+ }
+ return count;
}
/**
- * Returns true if there are any tasks submitted to this pool
- * that have not yet begun executing.
- * @return <code>true</code> if there are any queued submissions.
+ * Returns {@code true} if there are any tasks submitted to this
+ * pool that have not yet begun executing.
+ *
+ * @return {@code true} if there are any queued submissions
*/
public boolean hasQueuedSubmissions() {
- return !submissionQueue.isEmpty();
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null && !w.isEmpty())
+ return true;
+ }
+ }
+ return false;
}
/**
* Removes and returns the next unexecuted submission if one is
* available. This method may be useful in extensions to this
* class that re-assign work in systems with multiple pools.
- * @return the next submission, or null if none
+ *
+ * @return the next submission, or {@code null} if none
*/
protected ForkJoinTask<?> pollSubmission() {
- return submissionQueue.poll();
+ WorkQueue[] ws; WorkQueue w; ForkJoinTask<?> t;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null && (t = w.poll()) != null)
+ return t;
+ }
+ }
+ return null;
}
/**
* Removes all available unexecuted submitted and forked tasks
* from scheduling queues and adds them to the given collection,
* without altering their execution status. These may include
- * artifically generated or wrapped tasks. This method id designed
- * to be invoked only when the pool is known to be
+ * artificially generated or wrapped tasks. This method is
+ * designed to be invoked only when the pool is known to be
* quiescent. Invocations at other times may not remove all
* tasks. A failure encountered while attempting to add elements
- * to collection <tt>c</tt> may result in elements being in
+ * to collection {@code c} may result in elements being in
* neither, either or both collections when the associated
* exception is thrown. The behavior of this operation is
* undefined if the specified collection is modified while the
* operation is in progress.
+ *
* @param c the collection to transfer elements into
* @return the number of elements transferred
*/
- protected int drainTasksTo(Collection<ForkJoinTask<?>> c) {
- int n = submissionQueue.drainTo(c);
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
+ protected int drainTasksTo(Collection<? super ForkJoinTask<?>> c) {
+ int count = 0;
+ WorkQueue[] ws; WorkQueue w; ForkJoinTask<?> t;
+ if ((ws = workQueues) != null) {
for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread w = ws[i];
- if (w != null)
- n += w.drainTasksTo(c);
+ if ((w = ws[i]) != null) {
+ while ((t = w.poll()) != null) {
+ c.add(t);
+ ++count;
+ }
+ }
}
}
- return n;
+ return count;
}
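Note: since drainTasksTo is protected, re-assigning work across pools requires a subclass, and per the javadoc it should only run while the pool is quiescent. A sketch (class name ours):

    import java.util.ArrayList;
    import java.util.List;
    import scala.concurrent.forkjoin.ForkJoinPool;
    import scala.concurrent.forkjoin.ForkJoinTask;

    class DrainingPool extends ForkJoinPool {
        // Call only when this pool is known to be quiescent.
        public List<ForkJoinTask<?>> drainAll() {
            List<ForkJoinTask<?>> drained = new ArrayList<ForkJoinTask<?>>();
            drainTasksTo(drained);
            return drained;
        }
    }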
/**
@@ -1016,101 +2554,124 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* @return a string identifying this pool, as well as its state
*/
public String toString() {
- int ps = parallelism;
- int wc = workerCounts;
- int rc = runControl;
- long st = getStealCount();
- long qt = getQueuedTaskCount();
- long qs = getQueuedSubmissionCount();
+ // Use a single pass through workQueues to collect counts
+ long qt = 0L, qs = 0L; int rc = 0;
+ long st = stealCount.get();
+ long c = ctl;
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; ++i) {
+ if ((w = ws[i]) != null) {
+ int size = w.queueSize();
+ if ((i & 1) == 0)
+ qs += size;
+ else {
+ qt += size;
+ st += w.totalSteals;
+ if (w.isApparentlyUnblocked())
+ ++rc;
+ }
+ }
+ }
+ }
+ int pc = parallelism;
+ int tc = pc + (short)(c >>> TC_SHIFT);
+ int ac = pc + (int)(c >> AC_SHIFT);
+ if (ac < 0) // ignore transient negative
+ ac = 0;
+ String level;
+ if ((c & STOP_BIT) != 0)
+ level = (tc == 0) ? "Terminated" : "Terminating";
+ else
+ level = runState < 0 ? "Shutting down" : "Running";
return super.toString() +
- "[" + runStateToString(runStateOf(rc)) +
- ", parallelism = " + ps +
- ", size = " + totalCountOf(wc) +
- ", active = " + activeCountOf(rc) +
- ", running = " + runningCountOf(wc) +
+ "[" + level +
+ ", parallelism = " + pc +
+ ", size = " + tc +
+ ", active = " + ac +
+ ", running = " + rc +
", steals = " + st +
", tasks = " + qt +
", submissions = " + qs +
"]";
}
- private static String runStateToString(int rs) {
- switch(rs) {
- case RUNNING: return "Running";
- case SHUTDOWN: return "Shutting down";
- case TERMINATING: return "Terminating";
- case TERMINATED: return "Terminated";
- default: throw new Error("Unknown run state");
- }
- }
-
- // lifecycle control
-
/**
* Initiates an orderly shutdown in which previously submitted
* tasks are executed, but no new tasks will be accepted.
* Invocation has no additional effect if already shut down.
* Tasks that are in the process of being submitted concurrently
* during the course of this method may or may not be rejected.
+ *
* @throws SecurityException if a security manager exists and
* the caller is not permitted to modify threads
* because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
*/
public void shutdown() {
checkPermission();
- transitionRunStateTo(SHUTDOWN);
- if (canTerminateOnShutdown(runControl))
- terminateOnShutdown();
+ tryTerminate(false, true);
}
/**
- * Attempts to stop all actively executing tasks, and cancels all
- * waiting tasks. Tasks that are in the process of being
- * submitted or executed concurrently during the course of this
- * method may or may not be rejected. Unlike some other executors,
- * this method cancels rather than collects non-executed tasks
- * upon termination, so always returns an empty list. However, you
- * can use method <code>drainTasksTo</code> before invoking this
- * method to transfer unexecuted tasks to another collection.
+ * Attempts to cancel and/or stop all tasks, and reject all
+ * subsequently submitted tasks. Tasks that are in the process of
+ * being submitted or executed concurrently during the course of
+ * this method may or may not be rejected. This method cancels
+ * both existing and unexecuted tasks, in order to permit
+ * termination in the presence of task dependencies. So the method
+ * always returns an empty list (unlike the case for some other
+ * Executors).
+ *
* @return an empty list
* @throws SecurityException if a security manager exists and
* the caller is not permitted to modify threads
* because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
*/
public List<Runnable> shutdownNow() {
checkPermission();
- terminate();
+ tryTerminate(true, true);
return Collections.emptyList();
}
/**
- * Returns <code>true</code> if all tasks have completed following shut down.
+ * Returns {@code true} if all tasks have completed following shut down.
*
- * @return <code>true</code> if all tasks have completed following shut down
+ * @return {@code true} if all tasks have completed following shut down
*/
public boolean isTerminated() {
- return runStateOf(runControl) == TERMINATED;
+ long c = ctl;
+ return ((c & STOP_BIT) != 0L &&
+ (short)(c >>> TC_SHIFT) == -parallelism);
}
/**
- * Returns <code>true</code> if the process of termination has
- * commenced but possibly not yet completed.
+ * Returns {@code true} if the process of termination has
+ * commenced but not yet completed. This method may be useful for
+ * debugging. A return of {@code true} reported a sufficient
+ * period after shutdown may indicate that submitted tasks have
+ * ignored or suppressed interruption, or are waiting for IO,
+ * causing this executor not to properly terminate. (See the
+ * advisory notes for class {@link ForkJoinTask} stating that
+ * tasks should not normally entail blocking operations. But if
+ * they do, they must abort them on interrupt.)
*
- * @return <code>true</code> if terminating
+ * @return {@code true} if terminating but not yet terminated
*/
public boolean isTerminating() {
- return runStateOf(runControl) >= TERMINATING;
+ long c = ctl;
+ return ((c & STOP_BIT) != 0L &&
+ (short)(c >>> TC_SHIFT) != -parallelism);
}
/**
- * Returns <code>true</code> if this pool has been shut down.
+ * Returns {@code true} if this pool has been shut down.
*
- * @return <code>true</code> if this pool has been shut down
+ * @return {@code true} if this pool has been shut down
*/
public boolean isShutdown() {
- return runStateOf(runControl) >= SHUTDOWN;
+ return runState < 0;
}
/**
@@ -1120,14 +2681,14 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
- * @return <code>true</code> if this executor terminated and
- * <code>false</code> if the timeout elapsed before termination
+ * @return {@code true} if this executor terminated and
+ * {@code false} if the timeout elapsed before termination
* @throws InterruptedException if interrupted while waiting
*/
public boolean awaitTermination(long timeout, TimeUnit unit)
throws InterruptedException {
long nanos = unit.toNanos(timeout);
- final ReentrantLock lock = this.workerLock;
+ final Mutex lock = this.lock;
lock.lock();
try {
for (;;) {
@@ -1142,729 +2703,189 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
}
}
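Note: shutdown, shutdownNow, and awaitTermination together support the standard two-phase executor teardown; remember that shutdownNow here cancels tasks rather than returning them. A sketch of ours (timeouts illustrative):

    import java.util.concurrent.TimeUnit;
    import scala.concurrent.forkjoin.ForkJoinPool;

    final class PoolLifecycle {
        static void shutdownGracefully(ForkJoinPool pool)
                throws InterruptedException {
            pool.shutdown();                        // stop intake, keep working
            if (!pool.awaitTermination(30, TimeUnit.SECONDS)) {
                pool.shutdownNow();                 // cancel remaining tasks
                pool.awaitTermination(5, TimeUnit.SECONDS);
            }
        }
    }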
- // Shutdown and termination support
-
- /**
- * Callback from terminating worker. Null out the corresponding
- * workers slot, and if terminating, try to terminate, else try to
- * shrink workers array.
- * @param w the worker
- */
- final void workerTerminated(ForkJoinWorkerThread w) {
- updateStealCount(w);
- updateWorkerCount(-1);
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- int idx = w.poolIndex;
- if (idx >= 0 && idx < ws.length && ws[idx] == w)
- ws[idx] = null;
- if (totalCountOf(workerCounts) == 0) {
- terminate(); // no-op if already terminating
- transitionRunStateTo(TERMINATED);
- termination.signalAll();
- }
- else if (!isTerminating()) {
- tryShrinkWorkerArray();
- tryResumeSpare(true); // allow replacement
- }
- }
- } finally {
- lock.unlock();
- }
- signalIdleWorkers();
- }
-
- /**
- * Initiate termination.
- */
- private void terminate() {
- if (transitionRunStateTo(TERMINATING)) {
- stopAllWorkers();
- resumeAllSpares();
- signalIdleWorkers();
- cancelQueuedSubmissions();
- cancelQueuedWorkerTasks();
- interruptUnterminatedWorkers();
- signalIdleWorkers(); // resignal after interrupt
- }
- }
-
- /**
- * Possibly terminate when on shutdown state
- */
- private void terminateOnShutdown() {
- if (!hasQueuedSubmissions() && canTerminateOnShutdown(runControl))
- terminate();
- }
-
- /**
- * Clear out and cancel submissions
- */
- private void cancelQueuedSubmissions() {
- ForkJoinTask<?> task;
- while ((task = pollSubmission()) != null)
- task.cancel(false);
- }
-
- /**
- * Clean out worker queues.
- */
- private void cancelQueuedWorkerTasks() {
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null)
- t.cancelTasks();
- }
- }
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Set each worker's status to terminating. Requires lock to avoid
- * conflicts with add/remove
- */
- private void stopAllWorkers() {
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null)
- t.shutdownNow();
- }
- }
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Interrupt all unterminated workers. This is not required for
- * sake of internal control, but may help unstick user code during
- * shutdown.
- */
- private void interruptUnterminatedWorkers() {
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null && !t.isTerminated()) {
- try {
- t.interrupt();
- } catch (SecurityException ignore) {
- }
- }
- }
- }
- } finally {
- lock.unlock();
- }
- }
-
-
- /*
- * Nodes for event barrier to manage idle threads. Queue nodes
- * are basic Treiber stack nodes, also used for spare stack.
- *
- * The event barrier has an event count and a wait queue (actually
- * a Treiber stack). Workers are enabled to look for work when
- * the eventCount is incremented. If they fail to find work, they
- * may wait for next count. Upon release, threads help others wake
- * up.
- *
- * Synchronization events occur only in enough contexts to
- * maintain overall liveness:
- *
- * - Submission of a new task to the pool
- * - Resizes or other changes to the workers array
- * - pool termination
- * - A worker pushing a task on an empty queue
- *
- * The case of pushing a task occurs often enough, and is heavy
- * enough compared to simple stack pushes, to require special
- * handling: Method signalWork returns without advancing count if
- * the queue appears to be empty. This would ordinarily result in
- * races causing some queued waiters not to be woken up. To avoid
- * this, the first worker enqueued in method sync (see
- * syncIsReleasable) rescans for tasks after being enqueued, and
- * helps signal if any are found. This works well because the
- * worker has nothing better to do, and so might as well help
- * alleviate the overhead and contention on the threads actually
- * doing work. Also, since event counts increments on task
- * availability exist to maintain liveness (rather than to force
- * refreshes etc), it is OK for callers to exit early if
- * contending with another signaller.
- */
- static final class WaitQueueNode {
- WaitQueueNode next; // only written before enqueued
- volatile ForkJoinWorkerThread thread; // nulled to cancel wait
- final long count; // unused for spare stack
-
- WaitQueueNode(long c, ForkJoinWorkerThread w) {
- count = c;
- thread = w;
- }
-
- /**
- * Wake up waiter, returning false if known to already
- */
- boolean signal() {
- ForkJoinWorkerThread t = thread;
- if (t == null)
- return false;
- thread = null;
- LockSupport.unpark(t);
- return true;
- }
-
- /**
- * Await release on sync
- */
- void awaitSyncRelease(ForkJoinPool p) {
- while (thread != null && !p.syncIsReleasable(this))
- LockSupport.park(this);
- }
-
- /**
- * Await resumption as spare
- */
- void awaitSpareRelease() {
- while (thread != null) {
- if (!Thread.interrupted())
- LockSupport.park(this);
- }
- }
- }
-
- /**
- * Ensures that no thread is waiting for count to advance from the
- * current value of eventCount read on entry to this method, by
- * releasing waiting threads if necessary.
- * @return the count
- */
- final long ensureSync() {
- long c = eventCount;
- WaitQueueNode q;
- while ((q = syncStack) != null && q.count < c) {
- if (casBarrierStack(q, null)) {
- do {
- q.signal();
- } while ((q = q.next) != null);
- break;
- }
- }
- return c;
- }
-
- /**
- * Increments event count and releases waiting threads.
- */
- private void signalIdleWorkers() {
- long c;
- do;while (!casEventCount(c = eventCount, c+1));
- ensureSync();
- }
-
- /**
- * Signal threads waiting to poll a task. Because method sync
- * rechecks availability, it is OK to only proceed if queue
- * appears to be non-empty, and OK to skip under contention to
- * increment count (since some other thread succeeded).
- */
- final void signalWork() {
- long c;
- WaitQueueNode q;
- if (syncStack != null &&
- casEventCount(c = eventCount, c+1) &&
- (((q = syncStack) != null && q.count <= c) &&
- (!casBarrierStack(q, q.next) || !q.signal())))
- ensureSync();
- }
-
- /**
- * Waits until event count advances from last value held by
- * caller, or if excess threads, caller is resumed as spare, or
- * caller or pool is terminating. Updates caller's event on exit.
- * @param w the calling worker thread
- */
- final void sync(ForkJoinWorkerThread w) {
- updateStealCount(w); // Transfer w's count while it is idle
-
- while (!w.isShutdown() && !isTerminating() && !suspendIfSpare(w)) {
- long prev = w.lastEventCount;
- WaitQueueNode node = null;
- WaitQueueNode h;
- while (eventCount == prev &&
- ((h = syncStack) == null || h.count == prev)) {
- if (node == null)
- node = new WaitQueueNode(prev, w);
- if (casBarrierStack(node.next = h, node)) {
- node.awaitSyncRelease(this);
- break;
- }
- }
- long ec = ensureSync();
- if (ec != prev) {
- w.lastEventCount = ec;
- break;
- }
- }
- }
-
- /**
- * Returns true if worker waiting on sync can proceed:
- * - on signal (thread == null)
- * - on event count advance (winning race to notify vs signaller)
- * - on Interrupt
- * - if the first queued node, we find work available
- * If node was not signalled and event count not advanced on exit,
- * then we also help advance event count.
- * @return true if node can be released
- */
- final boolean syncIsReleasable(WaitQueueNode node) {
- long prev = node.count;
- if (!Thread.interrupted() && node.thread != null &&
- (node.next != null ||
- !ForkJoinWorkerThread.hasQueuedTasks(workers)) &&
- eventCount == prev)
- return false;
- if (node.thread != null) {
- node.thread = null;
- long ec = eventCount;
- if (prev <= ec) // help signal
- casEventCount(ec, ec+1);
- }
- return true;
- }
-
- /**
- * Returns true if a new sync event occurred since last call to
- * sync or this method, if so, updating caller's count.
- */
- final boolean hasNewSyncEvent(ForkJoinWorkerThread w) {
- long lc = w.lastEventCount;
- long ec = ensureSync();
- if (ec == lc)
- return false;
- w.lastEventCount = ec;
- return true;
- }
-
- // Parallelism maintenance
-
- /**
- * Decrement running count; if too low, add spare.
- *
- * Conceptually, all we need to do here is add or resume a
- * spare thread when one is about to block (and remove or
- * suspend it later when unblocked -- see suspendIfSpare).
- * However, implementing this idea requires coping with
- * several problems: We have imperfect information about the
- * states of threads. Some count updates can and usually do
- * lag run state changes, despite arrangements to keep them
- * accurate (for example, when possible, updating counts
- * before signalling or resuming), especially when running on
- * dynamic JVMs that don't optimize the infrequent paths that
- * update counts. Generating too many threads can make these
- * problems become worse, because excess threads are more
- * likely to be context-switched with others, slowing them all
- * down, especially if there is no work available, so all are
- * busy scanning or idling. Also, excess spare threads can
- * only be suspended or removed when they are idle, not
- * immediately when they aren't needed. So adding threads will
- * raise parallelism level for longer than necessary. Also,
- * FJ applications often enounter highly transient peaks when
- * many threads are blocked joining, but for less time than it
- * takes to create or resume spares.
- *
- * @param joinMe if non-null, return early if done
- * @param maintainParallelism if true, try to stay within
- * target counts, else create only to avoid starvation
- * @return true if joinMe known to be done
- */
- final boolean preJoin(ForkJoinTask<?> joinMe, boolean maintainParallelism) {
- maintainParallelism &= maintainsParallelism; // overrride
- boolean dec = false; // true when running count decremented
- while (spareStack == null || !tryResumeSpare(dec)) {
- int counts = workerCounts;
- if (dec || (dec = casWorkerCounts(counts, --counts))) { // CAS cheat
- if (!needSpare(counts, maintainParallelism))
- break;
- if (joinMe.status < 0)
- return true;
- if (tryAddSpare(counts))
- break;
- }
- }
- return false;
- }
-
- /**
- * Same idea as preJoin
- */
- final boolean preBlock(ManagedBlocker blocker, boolean maintainParallelism){
- maintainParallelism &= maintainsParallelism;
- boolean dec = false;
- while (spareStack == null || !tryResumeSpare(dec)) {
- int counts = workerCounts;
- if (dec || (dec = casWorkerCounts(counts, --counts))) {
- if (!needSpare(counts, maintainParallelism))
- break;
- if (blocker.isReleasable())
- return true;
- if (tryAddSpare(counts))
- break;
- }
- }
- return false;
- }
-
- /**
- * Returns true if a spare thread appears to be needed. If
- * maintaining parallelism, returns true when the deficit in
- * running threads is more than the surplus of total threads, and
- * there is apparently some work to do. This self-limiting rule
- * means that the more threads that have already been added, the
- * less parallelism we will tolerate before adding another.
- * @param counts current worker counts
- * @param maintainParallelism try to maintain parallelism
- */
- private boolean needSpare(int counts, boolean maintainParallelism) {
- int ps = parallelism;
- int rc = runningCountOf(counts);
- int tc = totalCountOf(counts);
- int runningDeficit = ps - rc;
- int totalSurplus = tc - ps;
- return (tc < maxPoolSize &&
- (rc == 0 || totalSurplus < 0 ||
- (maintainParallelism &&
- runningDeficit > totalSurplus &&
- ForkJoinWorkerThread.hasQueuedTasks(workers))));
- }
-
- /**
- * Add a spare worker if lock available and no more than the
- * expected numbers of threads exist
- * @return true if successful
- */
- private boolean tryAddSpare(int expectedCounts) {
- final ReentrantLock lock = this.workerLock;
- int expectedRunning = runningCountOf(expectedCounts);
- int expectedTotal = totalCountOf(expectedCounts);
- boolean success = false;
- boolean locked = false;
- // confirm counts while locking; CAS after obtaining lock
- try {
- for (;;) {
- int s = workerCounts;
- int tc = totalCountOf(s);
- int rc = runningCountOf(s);
- if (rc > expectedRunning || tc > expectedTotal)
- break;
- if (!locked && !(locked = lock.tryLock()))
- break;
- if (casWorkerCounts(s, workerCountsFor(tc+1, rc+1))) {
- createAndStartSpare(tc);
- success = true;
- break;
- }
- }
- } finally {
- if (locked)
- lock.unlock();
- }
- return success;
- }
-
- /**
- * Add the kth spare worker. On entry, pool coounts are already
- * adjusted to reflect addition.
- */
- private void createAndStartSpare(int k) {
- ForkJoinWorkerThread w = null;
- ForkJoinWorkerThread[] ws = ensureWorkerArrayCapacity(k + 1);
- int len = ws.length;
- // Probably, we can place at slot k. If not, find empty slot
- if (k < len && ws[k] != null) {
- for (k = 0; k < len && ws[k] != null; ++k)
- ;
- }
- if (k < len && !isTerminating() && (w = createWorker(k)) != null) {
- ws[k] = w;
- w.start();
- }
- else
- updateWorkerCount(-1); // adjust on failure
- signalIdleWorkers();
- }
-
- /**
- * Suspend calling thread w if there are excess threads. Called
- * only from sync. Spares are enqueued in a Treiber stack
- * using the same WaitQueueNodes as barriers. They are resumed
- * mainly in preJoin, but are also woken on pool events that
- * require all threads to check run state.
- * @param w the caller
- */
- private boolean suspendIfSpare(ForkJoinWorkerThread w) {
- WaitQueueNode node = null;
- int s;
- while (parallelism < runningCountOf(s = workerCounts)) {
- if (node == null)
- node = new WaitQueueNode(0, w);
- if (casWorkerCounts(s, s-1)) { // representation-dependent
- // push onto stack
- do;while (!casSpareStack(node.next = spareStack, node));
- // block until released by resumeSpare
- node.awaitSpareRelease();
- return true;
- }
- }
- return false;
- }
-
- /**
- * Try to pop and resume a spare thread.
- * @param updateCount if true, increment running count on success
- * @return true if successful
- */
- private boolean tryResumeSpare(boolean updateCount) {
- WaitQueueNode q;
- while ((q = spareStack) != null) {
- if (casSpareStack(q, q.next)) {
- if (updateCount)
- updateRunningCount(1);
- q.signal();
- return true;
- }
- }
- return false;
- }
-
- /**
- * Pop and resume all spare threads. Same idea as ensureSync.
- * @return true if any spares released
- */
- private boolean resumeAllSpares() {
- WaitQueueNode q;
- while ( (q = spareStack) != null) {
- if (casSpareStack(q, null)) {
- do {
- updateRunningCount(1);
- q.signal();
- } while ((q = q.next) != null);
- return true;
- }
- }
- return false;
- }
-
- /**
- * Pop and shutdown excessive spare threads. Call only while
- * holding lock. This is not guaranteed to eliminate all excess
- * threads, only those suspended as spares, which are the ones
- * unlikely to be needed in the future.
- */
- private void trimSpares() {
- int surplus = totalCountOf(workerCounts) - parallelism;
- WaitQueueNode q;
- while (surplus > 0 && (q = spareStack) != null) {
- if (casSpareStack(q, null)) {
- do {
- updateRunningCount(1);
- ForkJoinWorkerThread w = q.thread;
- if (w != null && surplus > 0 &&
- runningCountOf(workerCounts) > 0 && w.shutdown())
- --surplus;
- q.signal();
- } while ((q = q.next) != null);
- }
- }
- }
-
/**
* Interface for extending managed parallelism for tasks running
- * in ForkJoinPools. A ManagedBlocker provides two methods.
- * Method <code>isReleasable</code> must return true if blocking is not
- * necessary. Method <code>block</code> blocks the current thread
- * if necessary (perhaps internally invoking isReleasable before
- * actually blocking.).
+ * in {@link ForkJoinPool}s.
+ *
+ * <p>A {@code ManagedBlocker} provides two methods. Method
+ * {@code isReleasable} must return {@code true} if blocking is
+ * not necessary. Method {@code block} blocks the current thread
+ * if necessary (perhaps internally invoking {@code isReleasable}
+ * before actually blocking). These actions are performed by any
+ * thread invoking {@link ForkJoinPool#managedBlock}. The
+ * unusual methods in this API accommodate synchronizers that may,
+ * but don't usually, block for long periods. Similarly, they
+ * allow more efficient internal handling of cases in which
+ * additional workers may be, but usually are not, needed to
+ * ensure sufficient parallelism. Toward this end,
+ * implementations of method {@code isReleasable} must be amenable
+ * to repeated invocation.
+ *
* <p>For example, here is a ManagedBlocker based on a
* ReentrantLock:
- * <pre>
- * class ManagedLocker implements ManagedBlocker {
- * final ReentrantLock lock;
- * boolean hasLock = false;
- * ManagedLocker(ReentrantLock lock) { this.lock = lock; }
- * public boolean block() {
- * if (!hasLock)
- * lock.lock();
- * return true;
- * }
- * public boolean isReleasable() {
- * return hasLock || (hasLock = lock.tryLock());
- * }
+ * <pre> {@code
+ * class ManagedLocker implements ManagedBlocker {
+ * final ReentrantLock lock;
+ * boolean hasLock = false;
+ * ManagedLocker(ReentrantLock lock) { this.lock = lock; }
+ * public boolean block() {
+ * if (!hasLock)
+ * lock.lock();
+ * return true;
+ * }
+ * public boolean isReleasable() {
+ * return hasLock || (hasLock = lock.tryLock());
+ * }
+ * }}</pre>
+ *
+ * <p>Here is a class that possibly blocks waiting for an
+ * item on a given queue:
+ * <pre> {@code
+ * class QueueTaker<E> implements ManagedBlocker {
+ * final BlockingQueue<E> queue;
+ * volatile E item = null;
+ * QueueTaker(BlockingQueue<E> q) { this.queue = q; }
+ * public boolean block() throws InterruptedException {
+ * if (item == null)
+ * item = queue.take();
+ * return true;
* }
- * </pre>
+ * public boolean isReleasable() {
+ * return item != null || (item = queue.poll()) != null;
+ * }
+ * public E getItem() { // call after pool.managedBlock completes
+ * return item;
+ * }
+ * }}</pre>
*/
public static interface ManagedBlocker {
/**
* Possibly blocks the current thread, for example waiting for
* a lock or condition.
- * @return true if no additional blocking is necessary (i.e.,
- * if isReleasable would return true).
+ *
+ * @return {@code true} if no additional blocking is necessary
+ * (i.e., if isReleasable would return true)
* @throws InterruptedException if interrupted while waiting
- * (the method is not required to do so, but is allowe to).
+ * (the method is not required to do so, but is allowed to)
*/
boolean block() throws InterruptedException;
/**
- * Returns true if blocking is unnecessary.
+ * Returns {@code true} if blocking is unnecessary.
*/
boolean isReleasable();
}
/**
* Blocks in accord with the given blocker. If the current thread
- * is a ForkJoinWorkerThread, this method possibly arranges for a
- * spare thread to be activated if necessary to ensure parallelism
- * while the current thread is blocked. If
- * <code>maintainParallelism</code> is true and the pool supports
- * it ({@link #getMaintainsParallelism}), this method attempts to
- * maintain the pool's nominal parallelism. Otherwise if activates
- * a thread only if necessary to avoid complete starvation. This
- * option may be preferable when blockages use timeouts, or are
- * almost always brief.
- *
- * <p> If the caller is not a ForkJoinTask, this method is behaviorally
- * equivalent to
- * <pre>
- * while (!blocker.isReleasable())
- * if (blocker.block())
- * return;
- * </pre>
- * If the caller is a ForkJoinTask, then the pool may first
- * be expanded to ensure parallelism, and later adjusted.
+ * is a {@link ForkJoinWorkerThread}, this method possibly
+ * arranges for a spare thread to be activated if necessary to
+ * ensure sufficient parallelism while the current thread is blocked.
+ *
+ * <p>If the caller is not a {@link ForkJoinTask}, this method is
+ * behaviorally equivalent to
+ * <pre> {@code
+ * while (!blocker.isReleasable())
+ * if (blocker.block())
+ * return;
+ * }</pre>
+ *
+ * If the caller is a {@code ForkJoinTask}, then the pool may
+ * first be expanded to ensure parallelism, and later adjusted.
*
* @param blocker the blocker
- * @param maintainParallelism if true and supported by this pool,
- * attempt to maintain the pool's nominal parallelism; otherwise
- * activate a thread only if necessary to avoid complete
- * starvation.
- * @throws InterruptedException if blocker.block did so.
- */
- public static void managedBlock(ManagedBlocker blocker,
- boolean maintainParallelism)
+ * @throws InterruptedException if blocker.block did so
+ */
+ public static void managedBlock(ManagedBlocker blocker)
throws InterruptedException {
Thread t = Thread.currentThread();
- ForkJoinPool pool = (t instanceof ForkJoinWorkerThread?
- ((ForkJoinWorkerThread)t).pool : null);
- if (!blocker.isReleasable()) {
- try {
- if (pool == null ||
- !pool.preBlock(blocker, maintainParallelism))
- awaitBlocker(blocker);
- } finally {
- if (pool != null)
- pool.updateRunningCount(1);
+ ForkJoinPool p = ((t instanceof ForkJoinWorkerThread) ?
+ ((ForkJoinWorkerThread)t).pool : null);
+ while (!blocker.isReleasable()) {
+ if (p == null || p.tryCompensate(null, blocker)) {
+ try {
+ do {} while (!blocker.isReleasable() && !blocker.block());
+ } finally {
+ if (p != null)
+ p.incrementActiveCount();
+ }
+ break;
}
}
}
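Note: a usage sketch for the now single-argument managedBlock, inlining the QueueTaker idiom from the ManagedBlocker javadoc above (class and method names ours):

    import java.util.concurrent.BlockingQueue;
    import scala.concurrent.forkjoin.ForkJoinPool;

    final class ManagedTake {
        // If called from a worker thread, the pool may activate a spare
        // to preserve parallelism while we block on the queue.
        static <E> E take(final BlockingQueue<E> queue) throws InterruptedException {
            final Object[] box = new Object[1];
            ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker() {
                public boolean block() throws InterruptedException {
                    if (box[0] == null) box[0] = queue.take();
                    return true;
                }
                public boolean isReleasable() {
                    return box[0] != null || (box[0] = queue.poll()) != null;
                }
            });
            @SuppressWarnings("unchecked") E item = (E) box[0];
            return item;
        }
    }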
- private static void awaitBlocker(ManagedBlocker blocker)
- throws InterruptedException {
- do;while (!blocker.isReleasable() && !blocker.block());
- }
-
- // AbstractExecutorService overrides
+ // AbstractExecutorService overrides. These rely on undocumented
+ // fact that ForkJoinTask.adapt returns ForkJoinTasks that also
+ // implement RunnableFuture.
protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T value) {
- return new AdaptedRunnable(runnable, value);
+ return new ForkJoinTask.AdaptedRunnable<T>(runnable, value);
}
protected <T> RunnableFuture<T> newTaskFor(Callable<T> callable) {
- return new AdaptedCallable(callable);
+ return new ForkJoinTask.AdaptedCallable<T>(callable);
}
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe U;
+ private static final long CTL;
+ private static final long PARKBLOCKER;
+ private static final int ABASE;
+ private static final int ASHIFT;
- // Temporary Unsafe mechanics for preliminary release
- private static Unsafe getUnsafe() throws Throwable {
+ static {
+ poolNumberGenerator = new AtomicInteger();
+ nextSubmitterSeed = new AtomicInteger(0x55555555);
+ modifyThreadPermission = new RuntimePermission("modifyThread");
+ defaultForkJoinWorkerThreadFactory =
+ new DefaultForkJoinWorkerThreadFactory();
+ submitters = new ThreadSubmitter();
+ int s;
try {
- return Unsafe.getUnsafe();
+ U = getUnsafe();
+ Class<?> k = ForkJoinPool.class;
+ Class<?> ak = ForkJoinTask[].class;
+ CTL = U.objectFieldOffset
+ (k.getDeclaredField("ctl"));
+ Class<?> tk = Thread.class;
+ PARKBLOCKER = U.objectFieldOffset
+ (tk.getDeclaredField("parkBlocker"));
+ ABASE = U.arrayBaseOffset(ak);
+ s = U.arrayIndexScale(ak);
+ } catch (Exception e) {
+ throw new Error(e);
+ }
+ if ((s & (s-1)) != 0)
+ throw new Error("data type scale not a power of two");
+ ASHIFT = 31 - Integer.numberOfLeadingZeros(s);
+ }
+
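Note: the offsets cached above exist to feed CAS retry loops on the packed control fields. A standalone analog of that idiom (all names ours, not from this file):

    import java.lang.reflect.Field;
    import sun.misc.Unsafe;

    final class CtlHolder {
        volatile long ctl;
        private static final Unsafe U;
        private static final long CTL;
        static {
            try {
                Field f = Unsafe.class.getDeclaredField("theUnsafe");
                f.setAccessible(true);
                U = (Unsafe) f.get(null);
                CTL = U.objectFieldOffset(CtlHolder.class.getDeclaredField("ctl"));
            } catch (Exception e) {
                throw new Error(e);
            }
        }
        // Retry until the CAS wins, recomputing from a fresh read each time.
        void add(long delta) {
            long c;
            do {} while (!U.compareAndSwapLong(this, CTL, c = ctl, c + delta));
        }
    }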
+ /**
+ * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
+ * Replace with a simple call to Unsafe.getUnsafe when integrating
+ * into a jdk.
+ *
+ * @return a sun.misc.Unsafe
+ */
+ private static sun.misc.Unsafe getUnsafe() {
+ try {
+ return sun.misc.Unsafe.getUnsafe();
} catch (SecurityException se) {
try {
return java.security.AccessController.doPrivileged
- (new java.security.PrivilegedExceptionAction<Unsafe>() {
- public Unsafe run() throws Exception {
- return getUnsafePrivileged();
+ (new java.security
+ .PrivilegedExceptionAction<sun.misc.Unsafe>() {
+ public sun.misc.Unsafe run() throws Exception {
+ java.lang.reflect.Field f = sun.misc
+ .Unsafe.class.getDeclaredField("theUnsafe");
+ f.setAccessible(true);
+ return (sun.misc.Unsafe) f.get(null);
}});
} catch (java.security.PrivilegedActionException e) {
- throw e.getCause();
+ throw new RuntimeException("Could not initialize intrinsics",
+ e.getCause());
}
}
}
- private static Unsafe getUnsafePrivileged()
- throws NoSuchFieldException, IllegalAccessException {
- Field f = Unsafe.class.getDeclaredField("theUnsafe");
- f.setAccessible(true);
- return (Unsafe) f.get(null);
- }
-
- private static long fieldOffset(String fieldName)
- throws NoSuchFieldException {
- return _unsafe.objectFieldOffset
- (ForkJoinPool.class.getDeclaredField(fieldName));
- }
-
- static final Unsafe _unsafe;
- static final long eventCountOffset;
- static final long workerCountsOffset;
- static final long runControlOffset;
- static final long syncStackOffset;
- static final long spareStackOffset;
-
- static {
- try {
- _unsafe = getUnsafe();
- eventCountOffset = fieldOffset("eventCount");
- workerCountsOffset = fieldOffset("workerCounts");
- runControlOffset = fieldOffset("runControl");
- syncStackOffset = fieldOffset("syncStack");
- spareStackOffset = fieldOffset("spareStack");
- } catch (Throwable e) {
- throw new RuntimeException("Could not initialize intrinsics", e);
- }
- }
-
- private boolean casEventCount(long cmp, long val) {
- return _unsafe.compareAndSwapLong(this, eventCountOffset, cmp, val);
- }
- private boolean casWorkerCounts(int cmp, int val) {
- return _unsafe.compareAndSwapInt(this, workerCountsOffset, cmp, val);
- }
- private boolean casRunControl(int cmp, int val) {
- return _unsafe.compareAndSwapInt(this, runControlOffset, cmp, val);
- }
- private boolean casSpareStack(WaitQueueNode cmp, WaitQueueNode val) {
- return _unsafe.compareAndSwapObject(this, spareStackOffset, cmp, val);
- }
- private boolean casBarrierStack(WaitQueueNode cmp, WaitQueueNode val) {
- return _unsafe.compareAndSwapObject(this, syncStackOffset, cmp, val);
- }
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
index dc1a6bcccc..344f6887a6 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
@@ -1,470 +1,597 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
import java.io.Serializable;
-import java.util.*;
-import java.util.concurrent.*;
-import java.util.concurrent.atomic.*;
-import sun.misc.Unsafe;
-import java.lang.reflect.*;
+import java.util.Collection;
+import java.util.List;
+import java.util.RandomAccess;
+import java.lang.ref.WeakReference;
+import java.lang.ref.ReferenceQueue;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CancellationException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+//import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.locks.ReentrantLock;
+import java.lang.reflect.Constructor;
/**
- * Abstract base class for tasks that run within a {@link
- * ForkJoinPool}. A ForkJoinTask is a thread-like entity that is much
+ * Abstract base class for tasks that run within a {@link ForkJoinPool}.
+ * A {@code ForkJoinTask} is a thread-like entity that is much
* lighter weight than a normal thread. Huge numbers of tasks and
* subtasks may be hosted by a small number of actual threads in a
* ForkJoinPool, at the price of some usage limitations.
*
- * <p> A "main" ForkJoinTask begins execution when submitted to a
- * {@link ForkJoinPool}. Once started, it will usually in turn start
- * other subtasks. As indicated by the name of this class, many
- * programs using ForkJoinTasks employ only methods <code>fork</code>
- * and <code>join</code>, or derivatives such as
- * <code>invokeAll</code>. However, this class also provides a number
- * of other methods that can come into play in advanced usages, as
- * well as extension mechanics that allow support of new forms of
- * fork/join processing.
+ * <p>A "main" {@code ForkJoinTask} begins execution when submitted
+ * to a {@link ForkJoinPool}. Once started, it will usually in turn
+ * start other subtasks. As indicated by the name of this class,
+ * many programs using {@code ForkJoinTask} employ only methods
+ * {@link #fork} and {@link #join}, or derivatives such as {@link
+ * #invokeAll(ForkJoinTask...) invokeAll}. However, this class also
+ * provides a number of other methods that can come into play in
+ * advanced usages, as well as extension mechanics that allow
+ * support of new forms of fork/join processing.
*
- * <p>A ForkJoinTask is a lightweight form of {@link Future}. The
- * efficiency of ForkJoinTasks stems from a set of restrictions (that
- * are only partially statically enforceable) reflecting their
- * intended use as computational tasks calculating pure functions or
- * operating on purely isolated objects. The primary coordination
- * mechanisms are {@link #fork}, that arranges asynchronous execution,
- * and {@link #join}, that doesn't proceed until the task's result has
- * been computed. Computations should avoid <code>synchronized</code>
- * methods or blocks, and should minimize other blocking
- * synchronization apart from joining other tasks or using
- * synchronizers such as Phasers that are advertised to cooperate with
- * fork/join scheduling. Tasks should also not perform blocking IO,
- * and should ideally access variables that are completely independent
- * of those accessed by other running tasks. Minor breaches of these
- * restrictions, for example using shared output streams, may be
- * tolerable in practice, but frequent use may result in poor
- * performance, and the potential to indefinitely stall if the number
- * of threads not waiting for IO or other external synchronization
- * becomes exhausted. This usage restriction is in part enforced by
- * not permitting checked exceptions such as <code>IOExceptions</code>
- * to be thrown. However, computations may still encounter unchecked
- * exceptions, that are rethrown to callers attempting join
- * them. These exceptions may additionally include
- * RejectedExecutionExceptions stemming from internal resource
- * exhaustion such as failure to allocate internal task queues.
+ * <p>A {@code ForkJoinTask} is a lightweight form of {@link Future}.
+ * The efficiency of {@code ForkJoinTask}s stems from a set of
+ * restrictions (that are only partially statically enforceable)
+ * reflecting their main use as computational tasks calculating pure
+ * functions or operating on purely isolated objects. The primary
+ * coordination mechanisms are {@link #fork}, that arranges
+ * asynchronous execution, and {@link #join}, that doesn't proceed
+ * until the task's result has been computed. Computations should
+ * ideally avoid {@code synchronized} methods or blocks, and should
+ * minimize other blocking synchronization apart from joining other
+ * tasks or using synchronizers such as Phasers that are advertised to
+ * cooperate with fork/join scheduling. Subdividable tasks should also
+ * not perform blocking IO, and should ideally access variables that
+ * are completely independent of those accessed by other running
+ * tasks. These guidelines are loosely enforced by not permitting
+ * checked exceptions such as {@code IOExceptions} to be
+ * thrown. However, computations may still encounter unchecked
+ * exceptions, that are rethrown to callers attempting to join
+ * them. These exceptions may additionally include {@link
+ * RejectedExecutionException} stemming from internal resource
+ * exhaustion, such as failure to allocate internal task
+ * queues. Rethrown exceptions behave in the same way as regular
+ * exceptions, but, when possible, contain stack traces (as displayed
+ * for example using {@code ex.printStackTrace()}) of both the thread
+ * that initiated the computation as well as the thread actually
+ * encountering the exception; minimally only the latter.
+ *
+ * <p>It is possible to define and use ForkJoinTasks that may block,
+ * but doing so requires three further considerations: (1) Completion
+ * of few if any <em>other</em> tasks should be dependent on a task
+ * that blocks on external synchronization or IO. Event-style async
+ * tasks that are never joined often fall into this category. (2) To
+ * minimize resource impact, tasks should be small; ideally performing
+ * only the (possibly) blocking action. (3) Unless the {@link
+ * ForkJoinPool.ManagedBlocker} API is used, or the number of possibly
+ * blocked tasks is known to be less than the pool's {@link
+ * ForkJoinPool#getParallelism} level, the pool cannot guarantee that
+ * enough threads will be available to ensure progress or good
+ * performance.
*
* <p>The primary method for awaiting completion and extracting
* results of a task is {@link #join}, but there are several variants:
* The {@link Future#get} methods support interruptible and/or timed
- * waits for completion and report results using <code>Future</code>
- * conventions. Method {@link #helpJoin} enables callers to actively
- * execute other tasks while awaiting joins, which is sometimes more
- * efficient but only applies when all subtasks are known to be
- * strictly tree-structured. Method {@link #invoke} is semantically
- * equivalent to <code>fork(); join()</code> but always attempts to
- * begin execution in the current thread. The "<em>quiet</em>" forms
- * of these methods do not extract results or report exceptions. These
+ * waits for completion and report results using {@code Future}
+ * conventions. Method {@link #invoke} is semantically
+ * equivalent to {@code fork(); join()} but always attempts to begin
+ * execution in the current thread. The "<em>quiet</em>" forms of
+ * these methods do not extract results or report exceptions. These
* may be useful when a set of tasks are being executed, and you need
* to delay processing of results or exceptions until all complete.
- * Method <code>invokeAll</code> (available in multiple versions)
+ * Method {@code invokeAll} (available in multiple versions)
* performs the most common form of parallel invocation: forking a set
* of tasks and joining them all.
*
- * <p> The ForkJoinTask class is not usually directly subclassed.
+ * <p>In the most typical usages, a fork-join pair act like a call
+ * (fork) and return (join) from a parallel recursive function. As is
+ * the case with other forms of recursive calls, returns (joins)
+ * should be performed innermost-first. For example, {@code a.fork();
+ * b.fork(); b.join(); a.join();} is likely to be substantially more
+ * efficient than joining {@code a} before {@code b}.
+ *
+ * <p>The execution status of tasks may be queried at several levels
+ * of detail: {@link #isDone} is true if a task completed in any way
+ * (including the case where a task was cancelled without executing);
+ * {@link #isCompletedNormally} is true if a task completed without
+ * cancellation or encountering an exception; {@link #isCancelled} is
+ * true if the task was cancelled (in which case {@link #getException}
+ * returns a {@link java.util.concurrent.CancellationException}); and
+ * {@link #isCompletedAbnormally} is true if a task was either
+ * cancelled or encountered an exception, in which case {@link
+ * #getException} will return either the encountered exception or
+ * {@link java.util.concurrent.CancellationException}.
+ *
+ * <p>The ForkJoinTask class is not usually directly subclassed.
* Instead, you subclass one of the abstract classes that support a
- * particular style of fork/join processing. Normally, a concrete
+ * particular style of fork/join processing, typically {@link
+ * RecursiveAction} for computations that do not return results, or
+ * {@link RecursiveTask} for those that do. Normally, a concrete
* ForkJoinTask subclass declares fields comprising its parameters,
- * established in a constructor, and then defines a <code>compute</code>
+ * established in a constructor, and then defines a {@code compute}
* method that somehow uses the control methods supplied by this base
- * class. While these methods have <code>public</code> access (to allow
- * instances of different task subclasses to call each others
+ * class. While these methods have {@code public} access (to allow
+ * instances of different task subclasses to call each other's
* methods), some of them may only be called from within other
- * ForkJoinTasks. Attempts to invoke them in other contexts result in
- * exceptions or errors possibly including ClassCastException.
+ * ForkJoinTasks (as may be determined using method {@link
+ * #inForkJoinPool}). Attempts to invoke them in other contexts
+ * result in exceptions or errors, possibly including
+ * {@code ClassCastException}.
*
- * <p>Most base support methods are <code>final</code> because their
- * implementations are intrinsically tied to the underlying
- * lightweight task scheduling framework, and so cannot be overridden.
- * Developers creating new basic styles of fork/join processing should
- * minimally implement <code>protected</code> methods
- * <code>exec</code>, <code>setRawResult</code>, and
- * <code>getRawResult</code>, while also introducing an abstract
- * computational method that can be implemented in its subclasses,
- * possibly relying on other <code>protected</code> methods provided
- * by this class.
+ * <p>Method {@link #join} and its variants are appropriate for use
+ * only when completion dependencies are acyclic; that is, the
+ * parallel computation can be described as a directed acyclic graph
+ * (DAG). Otherwise, executions may encounter a form of deadlock as
+ * tasks cyclically wait for each other. However, this framework
+ * supports other methods and techniques (for example the use of
+ * {@link Phaser}, {@link #helpQuiesce}, and {@link #complete}) that
+ * may be of use in constructing custom subclasses for problems that
+ * are not statically structured as DAGs. To support such usages a
+ * ForkJoinTask may be atomically <em>marked</em> using {@link
+ * #markForkJoinTask} and checked for marking using {@link
+ * #isMarkedForkJoinTask}. The ForkJoinTask implementation does not
+ * use these {@code protected} methods or marks for any purpose, but
+ * they may be of use in the construction of specialized subclasses.
+ * For example, parallel graph traversals can use the supplied methods
+ * to avoid revisiting nodes/tasks that have already been processed.
+ * Also, completion-based designs can use them to record that one
+ * subtask has completed. (Method names for marking are bulky in part
+ * to encourage definition of methods that reflect their usage
+ * patterns.)
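+ *
+ * <p>A hedged sketch of such a traversal: {@code Node}, its fields, and
+ * the per-node work are hypothetical, and it assumes (not spelled out
+ * here) that {@code markForkJoinTask()} returns {@code true} only for
+ * the one call that atomically sets the mark:
+ *
+ * <pre> {@code
+ * class Node {
+ *   final List<Node> successors = new ArrayList<Node>();
+ *   final NodeTask task = new NodeTask(this); // one pre-built task per node
+ * }
+ * class NodeTask extends RecursiveAction {
+ *   final Node node;
+ *   NodeTask(Node node) { this.node = node; }
+ *   protected void compute() {
+ *     if (!markForkJoinTask())  // assumed: true only when newly marked
+ *       return;                 // node already claimed by another path
+ *     // ... per-node processing here ...
+ *     for (Node s : node.successors)
+ *       s.task.fork();          // fork successors; join none (graph may be cyclic)
+ *   }
+ * }}</pre>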
+ *
+ * <p>Most base support methods are {@code final}, to prevent
+ * overriding of implementations that are intrinsically tied to the
+ * underlying lightweight task scheduling framework. Developers
+ * creating new basic styles of fork/join processing should minimally
+ * implement {@code protected} methods {@link #exec}, {@link
+ * #setRawResult}, and {@link #getRawResult}, while also introducing
+ * an abstract computational method that can be implemented in its
+ * subclasses, possibly relying on other {@code protected} methods
+ * provided by this class.
*
* <p>ForkJoinTasks should perform relatively small amounts of
- * computations, othewise splitting into smaller tasks. As a very
- * rough rule of thumb, a task should perform more than 100 and less
- * than 10000 basic computational steps. If tasks are too big, then
- * parellelism cannot improve throughput. If too small, then memory
- * and internal task maintenance overhead may overwhelm processing.
+ * computation. Large tasks should be split into smaller subtasks,
+ * usually via recursive decomposition. As a very rough rule of thumb,
+ * a task should perform more than 100 and less than 10000 basic
+ * computational steps, and should avoid indefinite looping. If tasks
+ * are too big, then parallelism cannot improve throughput. If too
+ * small, then memory and internal task maintenance overhead may
+ * overwhelm processing.
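+ *
+ * <p>The usual idiom is a sequential cutoff; a sketch (a hypothetical
+ * {@code SumTask}, with {@code THRESHOLD} an illustrative tuning
+ * constant chosen so leaves stay within the suggested range of steps):
+ *
+ * <pre> {@code
+ * class SumTask extends RecursiveTask<Long> {
+ *   static final int THRESHOLD = 1 << 12;
+ *   final long[] a; final int lo, hi;
+ *   SumTask(long[] a, int lo, int hi) { this.a = a; this.lo = lo; this.hi = hi; }
+ *   protected Long compute() {
+ *     if (hi - lo <= THRESHOLD) {           // small enough: just loop
+ *       long s = 0;
+ *       for (int i = lo; i < hi; ++i) s += a[i];
+ *       return s;
+ *     }
+ *     int mid = (lo + hi) >>> 1;
+ *     SumTask left = new SumTask(a, lo, mid);
+ *     left.fork();                          // offer half for stealing
+ *     SumTask right = new SumTask(a, mid, hi);
+ *     return right.compute() + left.join(); // recurse locally, then join
+ *   }
+ * }}</pre>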
+ *
+ * <p>This class provides {@code adapt} methods for {@link Runnable}
+ * and {@link Callable}, that may be of use when mixing execution of
+ * {@code ForkJoinTasks} with other kinds of tasks. When all tasks are
+ * of this form, consider using a pool constructed in <em>asyncMode</em>.
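+ *
+ * <p>For example (a sketch; the pool construction is illustrative, and
+ * an anonymous class stands in for the {@code Callable}):
+ *
+ * <pre> {@code
+ * ForkJoinTask<String> task = ForkJoinTask.adapt(new Callable<String>() {
+ *   public String call() { return "done"; } // wrapped as a ForkJoinTask
+ * });
+ * String r = new ForkJoinPool().invoke(task); // r is "done"
+ * }</pre>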
*
- * <p>ForkJoinTasks are <code>Serializable</code>, which enables them
- * to be used in extensions such as remote execution frameworks. It is
- * in general sensible to serialize tasks only before or after, but
- * not during execution. Serialization is not relied on during
- * execution itself.
+ * <p>ForkJoinTasks are {@code Serializable}, which enables them to be
+ * used in extensions such as remote execution frameworks. It is
+ * sensible to serialize tasks only before or after, but not during,
+ * execution. Serialization is not relied on during execution itself.
+ *
+ * @since 1.7
+ * @author Doug Lea
*/
public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
- /**
- * Run control status bits packed into a single int to minimize
- * footprint and to ensure atomicity (via CAS). Status is
- * initially zero, and takes on nonnegative values until
- * completed, upon which status holds COMPLETED. CANCELLED, or
- * EXCEPTIONAL, which use the top 3 bits. Tasks undergoing
- * blocking waits by other threads have SIGNAL_MASK bits set --
- * bit 15 for external (nonFJ) waits, and the rest a count of
- * waiting FJ threads. (This representation relies on
- * ForkJoinPool max thread limits). Completion of a stolen task
- * with SIGNAL_MASK bits set awakens waiter via notifyAll. Even
- * though suboptimal for some purposes, we use basic builtin
- * wait/notify to take advantage of "monitor inflation" in JVMs
- * that we would otherwise need to emulate to avoid adding further
- * per-task bookkeeping overhead. Note that bits 16-28 are
- * currently unused. Also value 0x80000000 is available as spare
- * completion value.
+ /*
+ * See the internal documentation of class ForkJoinPool for a
+ * general implementation overview. ForkJoinTasks are mainly
+ * responsible for maintaining their "status" field amidst relays
+ * to methods in ForkJoinWorkerThread and ForkJoinPool.
+ *
+ * The methods of this class are more-or-less layered into
+ * (1) basic status maintenance
+ * (2) execution and awaiting completion
+ * (3) user-level methods that additionally report results.
+ * This is sometimes hard to see because this file orders exported
+ * methods in a way that flows well in javadocs.
*/
- volatile int status; // accessed directy by pool and workers
- static final int COMPLETION_MASK = 0xe0000000;
- static final int NORMAL = 0xe0000000; // == mask
- static final int CANCELLED = 0xc0000000;
- static final int EXCEPTIONAL = 0xa0000000;
- static final int SIGNAL_MASK = 0x0000ffff;
- static final int INTERNAL_SIGNAL_MASK = 0x00007fff;
- static final int EXTERNAL_SIGNAL = 0x00008000; // top bit of low word
-
- /**
- * Table of exceptions thrown by tasks, to enable reporting by
- * callers. Because exceptions are rare, we don't directly keep
- * them with task objects, but instead us a weak ref table. Note
- * that cancellation exceptions don't appear in the table, but are
- * instead recorded as status values.
- * Todo: Use ConcurrentReferenceHashMap
+ /*
+ * The status field holds run control status bits packed into a
+ * single int to minimize footprint and to ensure atomicity (via
+ * CAS). Status is initially zero, and takes on nonnegative
+ * values until completed, upon which status (anded with
+ * DONE_MASK) holds value NORMAL, CANCELLED, or EXCEPTIONAL. Tasks
+ * undergoing blocking waits by other threads have the SIGNAL bit
+ * set. Completion of a stolen task with SIGNAL set awakens any
+ * waiters via notifyAll. Even though suboptimal for some
+ * purposes, we use basic builtin wait/notify to take advantage of
+ * "monitor inflation" in JVMs that we would otherwise need to
+ * emulate to avoid adding further per-task bookkeeping overhead.
+ * We want these monitors to be "fat", i.e., not use biasing or
+ * thin-lock techniques, so use some odd coding idioms that tend
+ * to avoid them, mainly by arranging that every synchronized
+ * block performs a wait, notifyAll or both.
*/
- static final Map<ForkJoinTask<?>, Throwable> exceptionMap =
- Collections.synchronizedMap
- (new WeakHashMap<ForkJoinTask<?>, Throwable>());
- // within-package utilities
+ /** The run status of this task */
+ volatile int status; // accessed directly by pool and workers
+ static final int DONE_MASK = 0xf0000000; // mask out non-completion bits
+ static final int NORMAL = 0xf0000000; // must be negative
+ static final int CANCELLED = 0xc0000000; // must be < NORMAL
+ static final int EXCEPTIONAL = 0x80000000; // must be < CANCELLED
+ static final int SIGNAL = 0x00000001;
+ static final int MARKED = 0x00000002;
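+
+    // A worked decoding of this packing, using the constants above:
+    //   status == (CANCELLED | SIGNAL) == 0xc0000001 means:
+    //     status < 0                         -> done (completion values are negative)
+    //     (status & DONE_MASK) == CANCELLED  -> cancelled, not NORMAL/EXCEPTIONAL
+    //     (status & SIGNAL) != 0             -> a waiter requires notifyAll
+    //   Because, as signed ints, NORMAL > CANCELLED > EXCEPTIONAL, a test
+    //   such as "status < NORMAL" (isCompletedAbnormally) is one comparison.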
/**
- * Get current worker thread, or null if not a worker thread
- */
- static ForkJoinWorkerThread getWorker() {
- Thread t = Thread.currentThread();
- return ((t instanceof ForkJoinWorkerThread)?
- (ForkJoinWorkerThread)t : null);
- }
-
- final boolean casStatus(int cmp, int val) {
- return _unsafe.compareAndSwapInt(this, statusOffset, cmp, val);
- }
-
- /**
- * Workaround for not being able to rethrow unchecked exceptions.
- */
- static void rethrowException(Throwable ex) {
- if (ex != null)
- _unsafe.throwException(ex);
- }
-
- // Setting completion status
-
- /**
- * Mark completion and wake up threads waiting to join this task.
+ * Marks completion and wakes up threads waiting to join this
+ * task. A specialization for NORMAL completion is in method
+ * doExec.
+ *
* @param completion one of NORMAL, CANCELLED, EXCEPTIONAL
+ * @return completion status on exit
*/
- final void setCompletion(int completion) {
- ForkJoinPool pool = getPool();
- if (pool != null) {
- int s; // Clear signal bits while setting completion status
- do;while ((s = status) >= 0 && !casStatus(s, completion));
-
- if ((s & SIGNAL_MASK) != 0) {
- if ((s &= INTERNAL_SIGNAL_MASK) != 0)
- pool.updateRunningCount(s);
- synchronized(this) { notifyAll(); }
+ private int setCompletion(int completion) {
+ for (int s;;) {
+ if ((s = status) < 0)
+ return s;
+ if (U.compareAndSwapInt(this, STATUS, s, s | completion)) {
+ if ((s & SIGNAL) != 0)
+ synchronized (this) { notifyAll(); }
+ return completion;
}
}
- else
- externallySetCompletion(completion);
- }
-
- /**
- * Version of setCompletion for non-FJ threads. Leaves signal
- * bits for unblocked threads to adjust, and always notifies.
- */
- private void externallySetCompletion(int completion) {
- int s;
- do;while ((s = status) >= 0 &&
- !casStatus(s, (s & SIGNAL_MASK) | completion));
- synchronized(this) { notifyAll(); }
- }
-
- /**
- * Sets status to indicate normal completion
- */
- final void setNormalCompletion() {
- // Try typical fast case -- single CAS, no signal, not already done.
- // Manually expand casStatus to improve chances of inlining it
- if (!_unsafe.compareAndSwapInt(this, statusOffset, 0, NORMAL))
- setCompletion(NORMAL);
- }
-
- // internal waiting and notification
-
- /**
- * Performs the actual monitor wait for awaitDone
- */
- private void doAwaitDone() {
- // Minimize lock bias and in/de-flation effects by maximizing
- // chances of waiting inside sync
- try {
- while (status >= 0)
- synchronized(this) { if (status >= 0) wait(); }
- } catch (InterruptedException ie) {
- onInterruptedWait();
- }
}
/**
- * Performs the actual monitor wait for awaitDone
+ * Primary execution method for stolen tasks. Unless done, calls
+ * exec and records status if completed, but doesn't wait for
+ * completion otherwise.
+ *
+ * @return status on exit from this method
*/
- private void doAwaitDone(long startTime, long nanos) {
- synchronized(this) {
+ final int doExec() {
+ int s; boolean completed;
+ if ((s = status) >= 0) {
try {
- while (status >= 0) {
- long nt = nanos - System.nanoTime() - startTime;
- if (nt <= 0)
- break;
- wait(nt / 1000000, (int)(nt % 1000000));
+ completed = exec();
+ } catch (Throwable rex) {
+ return setExceptionalCompletion(rex);
+ }
+ while ((s = status) >= 0 && completed) {
+ if (U.compareAndSwapInt(this, STATUS, s, s | NORMAL)) {
+ if ((s & SIGNAL) != 0)
+ synchronized (this) { notifyAll(); }
+ return NORMAL;
}
- } catch (InterruptedException ie) {
- onInterruptedWait();
}
}
+ return s;
}
- // Awaiting completion
+ /**
+ * Tries to set SIGNAL status. Used by ForkJoinPool. Other
+ * variants are directly incorporated into externalAwaitDone etc.
+ *
+ * @return true if successful
+ */
+ final boolean trySetSignal() {
+ int s;
+ return U.compareAndSwapInt(this, STATUS, s = status, s | SIGNAL);
+ }
/**
- * Sets status to indicate there is joiner, then waits for join,
- * surrounded with pool notifications.
- * @return status upon exit
+ * Blocks a non-worker-thread until completion.
+ * @return status upon completion
*/
- private int awaitDone(ForkJoinWorkerThread w, boolean maintainParallelism) {
- ForkJoinPool pool = w == null? null : w.pool;
+ private int externalAwaitDone() {
+ boolean interrupted = false;
int s;
while ((s = status) >= 0) {
- if (casStatus(s, pool == null? s|EXTERNAL_SIGNAL : s+1)) {
- if (pool == null || !pool.preJoin(this, maintainParallelism))
- doAwaitDone();
- if (((s = status) & INTERNAL_SIGNAL_MASK) != 0)
- adjustPoolCountsOnUnblock(pool);
- break;
+ if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
+ synchronized (this) {
+ if (status >= 0) {
+ try {
+ wait();
+ } catch (InterruptedException ie) {
+ interrupted = true;
+ }
+ }
+ else
+ notifyAll();
+ }
}
}
+ if (interrupted)
+ Thread.currentThread().interrupt();
return s;
}
/**
- * Timed version of awaitDone
- * @return status upon exit
+ * Blocks a non-worker-thread until completion or interruption.
*/
- private int awaitDone(ForkJoinWorkerThread w, long nanos) {
- ForkJoinPool pool = w == null? null : w.pool;
+ private int externalInterruptibleAwaitDone() throws InterruptedException {
int s;
+ if (Thread.interrupted())
+ throw new InterruptedException();
while ((s = status) >= 0) {
- if (casStatus(s, pool == null? s|EXTERNAL_SIGNAL : s+1)) {
- long startTime = System.nanoTime();
- if (pool == null || !pool.preJoin(this, false))
- doAwaitDone(startTime, nanos);
- if ((s = status) >= 0) {
- adjustPoolCountsOnCancelledWait(pool);
- s = status;
+ if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
+ synchronized (this) {
+ if (status >= 0)
+ wait();
+ else
+ notifyAll();
}
- if (s < 0 && (s & INTERNAL_SIGNAL_MASK) != 0)
- adjustPoolCountsOnUnblock(pool);
- break;
}
}
return s;
}
- /**
- * Notify pool that thread is unblocked. Called by signalled
- * threads when woken by non-FJ threads (which is atypical).
- */
- private void adjustPoolCountsOnUnblock(ForkJoinPool pool) {
- int s;
- do;while ((s = status) < 0 && !casStatus(s, s & COMPLETION_MASK));
- if (pool != null && (s &= INTERNAL_SIGNAL_MASK) != 0)
- pool.updateRunningCount(s);
- }
/**
- * Notify pool to adjust counts on cancelled or timed out wait
+ * Implementation for join, get, quietlyJoin. Directly handles
+ * only cases of already-completed, external wait, and
+ * unfork+exec. Others are relayed to ForkJoinPool.awaitJoin.
+ *
+ * @return status upon completion
*/
- private void adjustPoolCountsOnCancelledWait(ForkJoinPool pool) {
- if (pool != null) {
- int s;
- while ((s = status) >= 0 && (s & INTERNAL_SIGNAL_MASK) != 0) {
- if (casStatus(s, s - 1)) {
- pool.updateRunningCount(1);
- break;
- }
+ private int doJoin() {
+ int s; Thread t; ForkJoinWorkerThread wt; ForkJoinPool.WorkQueue w;
+ if ((s = status) >= 0) {
+ if (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)) {
+ if (!(w = (wt = (ForkJoinWorkerThread)t).workQueue).
+ tryUnpush(this) || (s = doExec()) >= 0)
+ s = wt.pool.awaitJoin(w, this);
}
+ else
+ s = externalAwaitDone();
}
+ return s;
}
/**
- * Handle interruptions during waits.
+ * Implementation for invoke, quietlyInvoke.
+ *
+ * @return status upon completion
*/
- private void onInterruptedWait() {
- ForkJoinWorkerThread w = getWorker();
- if (w == null)
- Thread.currentThread().interrupt(); // re-interrupt
- else if (w.isTerminating())
- cancelIgnoringExceptions();
- // else if FJworker, ignore interrupt
+ private int doInvoke() {
+ int s; Thread t; ForkJoinWorkerThread wt;
+ if ((s = doExec()) >= 0) {
+ if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
+ s = (wt = (ForkJoinWorkerThread)t).pool.awaitJoin(wt.workQueue,
+ this);
+ else
+ s = externalAwaitDone();
+ }
+ return s;
}
- // Recording and reporting exceptions
+ // Exception table support
- private void setDoneExceptionally(Throwable rex) {
- exceptionMap.put(this, rex);
- setCompletion(EXCEPTIONAL);
- }
+ /**
+ * Table of exceptions thrown by tasks, to enable reporting by
+ * callers. Because exceptions are rare, we don't directly keep
+ * them with task objects, but instead use a weak ref table. Note
+ * that cancellation exceptions don't appear in the table, but are
+ * instead recorded as status values.
+ *
+     * Note: These statics are initialized below in a static block.
+ */
+ private static final ExceptionNode[] exceptionTable;
+ private static final ReentrantLock exceptionTableLock;
+ private static final ReferenceQueue<Object> exceptionTableRefQueue;
/**
- * Throws the exception associated with status s;
- * @throws the exception
+ * Fixed capacity for exceptionTable.
*/
- private void reportException(int s) {
- if ((s &= COMPLETION_MASK) < NORMAL) {
- if (s == CANCELLED)
- throw new CancellationException();
- else
- rethrowException(exceptionMap.get(this));
+ private static final int EXCEPTION_MAP_CAPACITY = 32;
+
+ /**
+ * Key-value nodes for exception table. The chained hash table
+ * uses identity comparisons, full locking, and weak references
+ * for keys. The table has a fixed capacity because it only
+ * maintains task exceptions long enough for joiners to access
+ * them, so should never become very large for sustained
+ * periods. However, since we do not know when the last joiner
+ * completes, we must use weak references and expunge them. We do
+ * so on each operation (hence full locking). Also, some thread in
+ * any ForkJoinPool will call helpExpungeStaleExceptions when its
+ * pool becomes isQuiescent.
+ */
+ static final class ExceptionNode extends WeakReference<ForkJoinTask<?>> {
+ final Throwable ex;
+ ExceptionNode next;
+ final long thrower; // use id not ref to avoid weak cycles
+ ExceptionNode(ForkJoinTask<?> task, Throwable ex, ExceptionNode next) {
+ super(task, exceptionTableRefQueue);
+ this.ex = ex;
+ this.next = next;
+ this.thrower = Thread.currentThread().getId();
}
}
/**
- * Returns result or throws exception using j.u.c.Future conventions
- * Only call when isDone known to be true.
+ * Records exception and sets exceptional completion.
+ *
+ * @return status on exit
*/
- private V reportFutureResult()
- throws ExecutionException, InterruptedException {
- int s = status & COMPLETION_MASK;
- if (s < NORMAL) {
- Throwable ex;
- if (s == CANCELLED)
- throw new CancellationException();
- if (s == EXCEPTIONAL && (ex = exceptionMap.get(this)) != null)
- throw new ExecutionException(ex);
- if (Thread.interrupted())
- throw new InterruptedException();
+ private int setExceptionalCompletion(Throwable ex) {
+ int h = System.identityHashCode(this);
+ final ReentrantLock lock = exceptionTableLock;
+ lock.lock();
+ try {
+ expungeStaleExceptions();
+ ExceptionNode[] t = exceptionTable;
+ int i = h & (t.length - 1);
+ for (ExceptionNode e = t[i]; ; e = e.next) {
+ if (e == null) {
+ t[i] = new ExceptionNode(this, ex, t[i]);
+ break;
+ }
+ if (e.get() == this) // already present
+ break;
+ }
+ } finally {
+ lock.unlock();
}
- return getRawResult();
+ return setCompletion(EXCEPTIONAL);
}
/**
- * Returns result or throws exception using j.u.c.Future conventions
- * with timeouts
+ * Cancels, ignoring any exceptions thrown by cancel. Used during
+ * worker and pool shutdown. Cancel is spec'ed not to throw any
+ * exceptions, but if it does anyway, we have no recourse during
+ * shutdown, so guard against this case.
*/
- private V reportTimedFutureResult()
- throws InterruptedException, ExecutionException, TimeoutException {
- Throwable ex;
- int s = status & COMPLETION_MASK;
- if (s == NORMAL)
- return getRawResult();
- if (s == CANCELLED)
- throw new CancellationException();
- if (s == EXCEPTIONAL && (ex = exceptionMap.get(this)) != null)
- throw new ExecutionException(ex);
- if (Thread.interrupted())
- throw new InterruptedException();
- throw new TimeoutException();
+ static final void cancelIgnoringExceptions(ForkJoinTask<?> t) {
+ if (t != null && t.status >= 0) {
+ try {
+ t.cancel(false);
+ } catch (Throwable ignore) {
+ }
+ }
}
- // internal execution methods
-
/**
- * Calls exec, recording completion, and rethrowing exception if
- * encountered. Caller should normally check status before calling
- * @return true if completed normally
+     * Removes exception node and clears status.
*/
- private boolean tryExec() {
- try { // try block must contain only call to exec
- if (!exec())
- return false;
- } catch (Throwable rex) {
- setDoneExceptionally(rex);
- rethrowException(rex);
- return false; // not reached
+ private void clearExceptionalCompletion() {
+ int h = System.identityHashCode(this);
+ final ReentrantLock lock = exceptionTableLock;
+ lock.lock();
+ try {
+ ExceptionNode[] t = exceptionTable;
+ int i = h & (t.length - 1);
+ ExceptionNode e = t[i];
+ ExceptionNode pred = null;
+ while (e != null) {
+ ExceptionNode next = e.next;
+ if (e.get() == this) {
+ if (pred == null)
+ t[i] = next;
+ else
+ pred.next = next;
+ break;
+ }
+ pred = e;
+ e = next;
+ }
+ expungeStaleExceptions();
+ status = 0;
+ } finally {
+ lock.unlock();
}
- setNormalCompletion();
- return true;
}
/**
- * Main execution method used by worker threads. Invokes
- * base computation unless already complete
+ * Returns a rethrowable exception for the given task, if
+ * available. To provide accurate stack traces, if the exception
+ * was not thrown by the current thread, we try to create a new
+ * exception of the same type as the one thrown, but with the
+ * recorded exception as its cause. If there is no such
+ * constructor, we instead try to use a no-arg constructor,
+ * followed by initCause, to the same effect. If none of these
+ * apply, or any fail due to other exceptions, we return the
+ * recorded exception, which is still correct, although it may
+ * contain a misleading stack trace.
+ *
+ * @return the exception, or null if none
*/
- final void quietlyExec() {
- if (status >= 0) {
+ private Throwable getThrowableException() {
+ if ((status & DONE_MASK) != EXCEPTIONAL)
+ return null;
+ int h = System.identityHashCode(this);
+ ExceptionNode e;
+ final ReentrantLock lock = exceptionTableLock;
+ lock.lock();
+ try {
+ expungeStaleExceptions();
+ ExceptionNode[] t = exceptionTable;
+ e = t[h & (t.length - 1)];
+ while (e != null && e.get() != this)
+ e = e.next;
+ } finally {
+ lock.unlock();
+ }
+ Throwable ex;
+ if (e == null || (ex = e.ex) == null)
+ return null;
+ if (e.thrower != Thread.currentThread().getId()) {
+ Class<? extends Throwable> ec = ex.getClass();
try {
- if (!exec())
- return;
- } catch(Throwable rex) {
- setDoneExceptionally(rex);
- return;
+ Constructor<?> noArgCtor = null;
+ Constructor<?>[] cs = ec.getConstructors();// public ctors only
+ for (int i = 0; i < cs.length; ++i) {
+ Constructor<?> c = cs[i];
+ Class<?>[] ps = c.getParameterTypes();
+ if (ps.length == 0)
+ noArgCtor = c;
+ else if (ps.length == 1 && ps[0] == Throwable.class)
+ return (Throwable)(c.newInstance(ex));
+ }
+ if (noArgCtor != null) {
+ Throwable wx = (Throwable)(noArgCtor.newInstance());
+ wx.initCause(ex);
+ return wx;
+ }
+ } catch (Exception ignore) {
}
- setNormalCompletion();
}
+ return ex;
}
/**
- * Calls exec, recording but not rethrowing exception
- * Caller should normally check status before calling
- * @return true if completed normally
+     * Polls stale refs and removes them. Call only while holding lock.
*/
- private boolean tryQuietlyInvoke() {
- try {
- if (!exec())
- return false;
- } catch (Throwable rex) {
- setDoneExceptionally(rex);
- return false;
+ private static void expungeStaleExceptions() {
+ for (Object x; (x = exceptionTableRefQueue.poll()) != null;) {
+ if (x instanceof ExceptionNode) {
+ ForkJoinTask<?> key = ((ExceptionNode)x).get();
+ ExceptionNode[] t = exceptionTable;
+ int i = System.identityHashCode(key) & (t.length - 1);
+ ExceptionNode e = t[i];
+ ExceptionNode pred = null;
+ while (e != null) {
+ ExceptionNode next = e.next;
+ if (e == x) {
+ if (pred == null)
+ t[i] = next;
+ else
+ pred.next = next;
+ break;
+ }
+ pred = e;
+ e = next;
+ }
+ }
}
- setNormalCompletion();
- return true;
}
/**
- * Cancel, ignoring any exceptions it throws
+ * If lock is available, poll stale refs and remove them.
+ * Called from ForkJoinPool when pools become quiescent.
*/
- final void cancelIgnoringExceptions() {
- try {
- cancel(false);
- } catch(Throwable ignore) {
+ static final void helpExpungeStaleExceptions() {
+ final ReentrantLock lock = exceptionTableLock;
+ if (lock.tryLock()) {
+ try {
+ expungeStaleExceptions();
+ } finally {
+ lock.unlock();
+ }
}
}
/**
- * Main implementation of helpJoin
+ * Throws exception, if any, associated with the given status.
*/
- private int busyJoin(ForkJoinWorkerThread w) {
- int s;
- ForkJoinTask<?> t;
- while ((s = status) >= 0 && (t = w.scanWhileJoining(this)) != null)
- t.quietlyExec();
- return (s >= 0)? awaitDone(w, false) : s; // block if no work
+ private void reportException(int s) {
+ Throwable ex = ((s == CANCELLED) ? new CancellationException() :
+ (s == EXCEPTIONAL) ? getThrowableException() :
+ null);
+ if (ex != null)
+ U.throwException(ex);
}
// public methods
@@ -472,70 +599,111 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
/**
* Arranges to asynchronously execute this task. While it is not
* necessarily enforced, it is a usage error to fork a task more
- * than once unless it has completed and been reinitialized. This
- * method may be invoked only from within ForkJoinTask
- * computations. Attempts to invoke in other contexts result in
- * exceptions or errors possibly including ClassCastException.
+ * than once unless it has completed and been reinitialized.
+ * Subsequent modifications to the state of this task or any data
+ * it operates on are not necessarily consistently observable by
+ * any thread other than the one executing it unless preceded by a
+ * call to {@link #join} or related methods, or a call to {@link
+ * #isDone} returning {@code true}.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
+ *
+ * @return {@code this}, to simplify usage
*/
- public final void fork() {
- ((ForkJoinWorkerThread)(Thread.currentThread())).pushTask(this);
+ public final ForkJoinTask<V> fork() {
+ ((ForkJoinWorkerThread)Thread.currentThread()).workQueue.push(this);
+ return this;
}
/**
- * Returns the result of the computation when it is ready.
- * This method differs from <code>get</code> in that abnormal
- * completion results in RuntimeExceptions or Errors, not
- * ExecutionExceptions.
+ * Returns the result of the computation when it {@link #isDone is
+ * done}. This method differs from {@link #get()} in that
+ * abnormal completion results in {@code RuntimeException} or
+ * {@code Error}, not {@code ExecutionException}, and that
+ * interrupts of the calling thread do <em>not</em> cause the
+ * method to abruptly return by throwing {@code
+ * InterruptedException}.
*
* @return the computed result
*/
public final V join() {
- ForkJoinWorkerThread w = getWorker();
- if (w == null || status < 0 || !w.unpushTask(this) || !tryExec())
- reportException(awaitDone(w, true));
+ int s;
+ if ((s = doJoin() & DONE_MASK) != NORMAL)
+ reportException(s);
return getRawResult();
}
/**
* Commences performing this task, awaits its completion if
- * necessary, and return its result.
- * @throws Throwable (a RuntimeException, Error, or unchecked
- * exception) if the underlying computation did so.
+ * necessary, and returns its result, or throws an (unchecked)
+ * {@code RuntimeException} or {@code Error} if the underlying
+ * computation did so.
+ *
* @return the computed result
*/
public final V invoke() {
- if (status >= 0 && tryExec())
- return getRawResult();
- else
- return join();
+ int s;
+ if ((s = doInvoke() & DONE_MASK) != NORMAL)
+ reportException(s);
+ return getRawResult();
}
/**
- * Forks both tasks, returning when <code>isDone</code> holds for
- * both of them or an exception is encountered. This method may be
- * invoked only from within ForkJoinTask computations. Attempts to
- * invoke in other contexts result in exceptions or errors
- * possibly including ClassCastException.
- * @param t1 one task
- * @param t2 the other task
- * @throws NullPointerException if t1 or t2 are null
- * @throws RuntimeException or Error if either task did so.
+ * Forks the given tasks, returning when {@code isDone} holds for
+ * each task or an (unchecked) exception is encountered, in which
+ * case the exception is rethrown. If more than one task
+ * encounters an exception, then this method throws any one of
+ * these exceptions. If any task encounters an exception, the
+ * other may be cancelled. However, the execution status of
+ * individual tasks is not guaranteed upon exceptional return. The
+ * status of each task may be obtained using {@link
+ * #getException()} and related methods to check if they have been
+ * cancelled, completed normally or exceptionally, or left
+ * unprocessed.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
+ *
+ * @param t1 the first task
+ * @param t2 the second task
+ * @throws NullPointerException if any task is null
*/
- public static void invokeAll(ForkJoinTask<?>t1, ForkJoinTask<?> t2) {
+ public static void invokeAll(ForkJoinTask<?> t1, ForkJoinTask<?> t2) {
+ int s1, s2;
t2.fork();
- t1.invoke();
- t2.join();
+ if ((s1 = t1.doInvoke() & DONE_MASK) != NORMAL)
+ t1.reportException(s1);
+ if ((s2 = t2.doJoin() & DONE_MASK) != NORMAL)
+ t2.reportException(s2);
}
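+
+    /*
+     * A sketch of typical use from inside a compute() method, reusing the
+     * hypothetical Fib class from the class-level example:
+     *
+     *   Fib a = new Fib(n - 1);
+     *   Fib b = new Fib(n - 2);
+     *   invokeAll(a, b);             // forks b, invokes a, then joins b
+     *   return a.join() + b.join();  // both already done; joins report results
+     */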
/**
- * Forks the given tasks, returning when <code>isDone</code> holds
- * for all of them. If any task encounters an exception, others
- * may be cancelled. This method may be invoked only from within
- * ForkJoinTask computations. Attempts to invoke in other contexts
- * result in exceptions or errors possibly including ClassCastException.
- * @param tasks the array of tasks
- * @throws NullPointerException if tasks or any element are null.
- * @throws RuntimeException or Error if any task did so.
+ * Forks the given tasks, returning when {@code isDone} holds for
+ * each task or an (unchecked) exception is encountered, in which
+ * case the exception is rethrown. If more than one task
+ * encounters an exception, then this method throws any one of
+ * these exceptions. If any task encounters an exception, others
+ * may be cancelled. However, the execution status of individual
+ * tasks is not guaranteed upon exceptional return. The status of
+ * each task may be obtained using {@link #getException()} and
+ * related methods to check if they have been cancelled, completed
+ * normally or exceptionally, or left unprocessed.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
+ *
+ * @param tasks the tasks
+ * @throws NullPointerException if any task is null
*/
public static void invokeAll(ForkJoinTask<?>... tasks) {
Throwable ex = null;
@@ -548,46 +716,53 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
else if (i != 0)
t.fork();
- else {
- t.quietlyInvoke();
- if (ex == null)
- ex = t.getException();
- }
+ else if (t.doInvoke() < NORMAL && ex == null)
+ ex = t.getException();
}
for (int i = 1; i <= last; ++i) {
ForkJoinTask<?> t = tasks[i];
if (t != null) {
if (ex != null)
t.cancel(false);
- else {
- t.quietlyJoin();
- if (ex == null)
- ex = t.getException();
- }
+ else if (t.doJoin() < NORMAL)
+ ex = t.getException();
}
}
if (ex != null)
- rethrowException(ex);
+ U.throwException(ex);
}
/**
- * Forks all tasks in the collection, returning when
- * <code>isDone</code> holds for all of them. If any task
- * encounters an exception, others may be cancelled. This method
- * may be invoked only from within ForkJoinTask
- * computations. Attempts to invoke in other contexts resul!t in
- * exceptions or errors possibly including ClassCastException.
+ * Forks all tasks in the specified collection, returning when
+ * {@code isDone} holds for each task or an (unchecked) exception
+ * is encountered, in which case the exception is rethrown. If
+ * more than one task encounters an exception, then this method
+ * throws any one of these exceptions. If any task encounters an
+ * exception, others may be cancelled. However, the execution
+ * status of individual tasks is not guaranteed upon exceptional
+ * return. The status of each task may be obtained using {@link
+ * #getException()} and related methods to check if they have been
+ * cancelled, completed normally or exceptionally, or left
+ * unprocessed.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
+ *
* @param tasks the collection of tasks
- * @throws NullPointerException if tasks or any element are null.
- * @throws RuntimeException or Error if any task did so.
+ * @return the tasks argument, to simplify usage
+ * @throws NullPointerException if tasks or any element are null
*/
- public static void invokeAll(Collection<? extends ForkJoinTask<?>> tasks) {
- if (!(tasks instanceof List)) {
- invokeAll(tasks.toArray(new ForkJoinTask[tasks.size()]));
- return;
+ public static <T extends ForkJoinTask<?>> Collection<T> invokeAll(Collection<T> tasks) {
+ if (!(tasks instanceof RandomAccess) || !(tasks instanceof List<?>)) {
+ invokeAll(tasks.toArray(new ForkJoinTask<?>[tasks.size()]));
+ return tasks;
}
+ @SuppressWarnings("unchecked")
List<? extends ForkJoinTask<?>> ts =
- (List<? extends ForkJoinTask<?>>)tasks;
+ (List<? extends ForkJoinTask<?>>) tasks;
Throwable ex = null;
int last = ts.size() - 1;
for (int i = last; i >= 0; --i) {
@@ -598,253 +773,326 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
else if (i != 0)
t.fork();
- else {
- t.quietlyInvoke();
- if (ex == null)
- ex = t.getException();
- }
+ else if (t.doInvoke() < NORMAL && ex == null)
+ ex = t.getException();
}
for (int i = 1; i <= last; ++i) {
ForkJoinTask<?> t = ts.get(i);
if (t != null) {
if (ex != null)
t.cancel(false);
- else {
- t.quietlyJoin();
- if (ex == null)
- ex = t.getException();
- }
+ else if (t.doJoin() < NORMAL)
+ ex = t.getException();
}
}
if (ex != null)
- rethrowException(ex);
+ U.throwException(ex);
+ return tasks;
}
/**
- * Returns true if the computation performed by this task has
- * completed (or has been cancelled).
- * @return true if this computation has completed
+ * Attempts to cancel execution of this task. This attempt will
+ * fail if the task has already completed or could not be
+ * cancelled for some other reason. If successful, and this task
+ * has not started when {@code cancel} is called, execution of
+ * this task is suppressed. After this method returns
+ * successfully, unless there is an intervening call to {@link
+ * #reinitialize}, subsequent calls to {@link #isCancelled},
+ * {@link #isDone}, and {@code cancel} will return {@code true}
+ * and calls to {@link #join} and related methods will result in
+ * {@code CancellationException}.
+ *
+ * <p>This method may be overridden in subclasses, but if so, must
+ * still ensure that these properties hold. In particular, the
+ * {@code cancel} method itself must not throw exceptions.
+ *
+ * <p>This method is designed to be invoked by <em>other</em>
+ * tasks. To terminate the current task, you can just return or
+ * throw an unchecked exception from its computation method, or
+ * invoke {@link #completeExceptionally}.
+ *
+ * @param mayInterruptIfRunning this value has no effect in the
+ * default implementation because interrupts are not used to
+ * control cancellation.
+ *
+ * @return {@code true} if this task is now cancelled
*/
+ public boolean cancel(boolean mayInterruptIfRunning) {
+ return (setCompletion(CANCELLED) & DONE_MASK) == CANCELLED;
+ }
+
public final boolean isDone() {
return status < 0;
}
- /**
- * Returns true if this task was cancelled.
- * @return true if this task was cancelled
- */
public final boolean isCancelled() {
- return (status & COMPLETION_MASK) == CANCELLED;
+ return (status & DONE_MASK) == CANCELLED;
}
/**
- * Asserts that the results of this task's computation will not be
- * used. If a cancellation occurs before atempting to execute this
- * task, then execution will be suppressed, <code>isCancelled</code>
- * will report true, and <code>join</code> will result in a
- * <code>CancellationException</code> being thrown. Otherwise, when
- * cancellation races with completion, there are no guarantees
- * about whether <code>isCancelled</code> will report true, whether
- * <code>join</code> will return normally or via an exception, or
- * whether these behaviors will remain consistent upon repeated
- * invocation.
- *
- * <p>This method may be overridden in subclasses, but if so, must
- * still ensure that these minimal properties hold. In particular,
- * the cancel method itself must not throw exceptions.
- *
- * <p> This method is designed to be invoked by <em>other</em>
- * tasks. To terminate the current task, you can just return or
- * throw an unchecked exception from its computation method, or
- * invoke <code>completeExceptionally</code>.
- *
- * @param mayInterruptIfRunning this value is ignored in the
- * default implementation because tasks are not in general
- * cancelled via interruption.
+ * Returns {@code true} if this task threw an exception or was cancelled.
*
- * @return true if this task is now cancelled
+ * @return {@code true} if this task threw an exception or was cancelled
*/
- public boolean cancel(boolean mayInterruptIfRunning) {
- setCompletion(CANCELLED);
- return (status & COMPLETION_MASK) == CANCELLED;
+ public final boolean isCompletedAbnormally() {
+ return status < NORMAL;
}
/**
- * Returns true if this task threw an exception or was cancelled
- * @return true if this task threw an exception or was cancelled
+ * Returns {@code true} if this task completed without throwing an
+ * exception and was not cancelled.
+ *
+ * @return {@code true} if this task completed without throwing an
+ * exception and was not cancelled
*/
- public final boolean isCompletedAbnormally() {
- return (status & COMPLETION_MASK) < NORMAL;
+ public final boolean isCompletedNormally() {
+ return (status & DONE_MASK) == NORMAL;
}
/**
* Returns the exception thrown by the base computation, or a
- * CancellationException if cancelled, or null if none or if the
- * method has not yet completed.
- * @return the exception, or null if none
+ * {@code CancellationException} if cancelled, or {@code null} if
+ * none or if the method has not yet completed.
+ *
+ * @return the exception, or {@code null} if none
*/
public final Throwable getException() {
- int s = status & COMPLETION_MASK;
- if (s >= NORMAL)
- return null;
- if (s == CANCELLED)
- return new CancellationException();
- return exceptionMap.get(this);
+ int s = status & DONE_MASK;
+ return ((s >= NORMAL) ? null :
+ (s == CANCELLED) ? new CancellationException() :
+ getThrowableException());
}
/**
* Completes this task abnormally, and if not already aborted or
* cancelled, causes it to throw the given exception upon
- * <code>join</code> and related operations. This method may be used
+ * {@code join} and related operations. This method may be used
* to induce exceptions in asynchronous tasks, or to force
* completion of tasks that would not otherwise complete. Its use
- * in other situations is likely to be wrong. This method is
- * overridable, but overridden versions must invoke <code>super</code>
+ * in other situations is discouraged. This method is
+ * overridable, but overridden versions must invoke {@code super}
* implementation to maintain guarantees.
*
- * @param ex the exception to throw. If this exception is
- * not a RuntimeException or Error, the actual exception thrown
- * will be a RuntimeException with cause ex.
+ * @param ex the exception to throw. If this exception is not a
+ * {@code RuntimeException} or {@code Error}, the actual exception
+ * thrown will be a {@code RuntimeException} with cause {@code ex}.
*/
public void completeExceptionally(Throwable ex) {
- setDoneExceptionally((ex instanceof RuntimeException) ||
- (ex instanceof Error)? ex :
- new RuntimeException(ex));
+ setExceptionalCompletion((ex instanceof RuntimeException) ||
+ (ex instanceof Error) ? ex :
+ new RuntimeException(ex));
}
/**
* Completes this task, and if not already aborted or cancelled,
- * returning a <code>null</code> result upon <code>join</code> and related
- * operations. This method may be used to provide results for
- * asynchronous tasks, or to provide alternative handling for
- * tasks that would not otherwise complete normally. Its use in
- * other situations is likely to be wrong. This method is
- * overridable, but overridden versions must invoke <code>super</code>
- * implementation to maintain guarantees.
+ * returning the given value as the result of subsequent
+ * invocations of {@code join} and related operations. This method
+ * may be used to provide results for asynchronous tasks, or to
+ * provide alternative handling for tasks that would not otherwise
+ * complete normally. Its use in other situations is
+ * discouraged. This method is overridable, but overridden
+ * versions must invoke {@code super} implementation to maintain
+ * guarantees.
*
- * @param value the result value for this task.
+ * @param value the result value for this task
*/
public void complete(V value) {
try {
setRawResult(value);
- } catch(Throwable rex) {
- setDoneExceptionally(rex);
+ } catch (Throwable rex) {
+ setExceptionalCompletion(rex);
return;
}
- setNormalCompletion();
- }
-
- public final V get() throws InterruptedException, ExecutionException {
- ForkJoinWorkerThread w = getWorker();
- if (w == null || status < 0 || !w.unpushTask(this) || !tryQuietlyInvoke())
- awaitDone(w, true);
- return reportFutureResult();
- }
-
- public final V get(long timeout, TimeUnit unit)
- throws InterruptedException, ExecutionException, TimeoutException {
- ForkJoinWorkerThread w = getWorker();
- if (w == null || status < 0 || !w.unpushTask(this) || !tryQuietlyInvoke())
- awaitDone(w, unit.toNanos(timeout));
- return reportTimedFutureResult();
+ setCompletion(NORMAL);
}
/**
- * Possibly executes other tasks until this task is ready, then
- * returns the result of the computation. This method may be more
- * efficient than <code>join</code>, but is only applicable when
- * there are no potemtial dependencies between continuation of the
- * current task and that of any other task that might be executed
- * while helping. (This usually holds for pure divide-and-conquer
- * tasks). This method may be invoked only from within
- * ForkJoinTask computations. Attempts to invoke in other contexts
- * resul!t in exceptions or errors possibly including ClassCastException.
+ * Waits if necessary for the computation to complete, and then
+ * retrieves its result.
+ *
* @return the computed result
+ * @throws CancellationException if the computation was cancelled
+ * @throws ExecutionException if the computation threw an
+ * exception
+ * @throws InterruptedException if the current thread is not a
+ * member of a ForkJoinPool and was interrupted while waiting
*/
- public final V helpJoin() {
- ForkJoinWorkerThread w = (ForkJoinWorkerThread)(Thread.currentThread());
- if (status < 0 || !w.unpushTask(this) || !tryExec())
- reportException(busyJoin(w));
+ public final V get() throws InterruptedException, ExecutionException {
+ int s = (Thread.currentThread() instanceof ForkJoinWorkerThread) ?
+ doJoin() : externalInterruptibleAwaitDone();
+ Throwable ex;
+ if ((s &= DONE_MASK) == CANCELLED)
+ throw new CancellationException();
+ if (s == EXCEPTIONAL && (ex = getThrowableException()) != null)
+ throw new ExecutionException(ex);
return getRawResult();
}
/**
- * Possibly executes other tasks until this task is ready. This
- * method may be invoked only from within ForkJoinTask
- * computations. Attempts to invoke in other contexts resul!t in
- * exceptions or errors possibly including ClassCastException.
+ * Waits if necessary for at most the given time for the computation
+ * to complete, and then retrieves its result, if available.
+ *
+ * @param timeout the maximum time to wait
+ * @param unit the time unit of the timeout argument
+ * @return the computed result
+ * @throws CancellationException if the computation was cancelled
+ * @throws ExecutionException if the computation threw an
+ * exception
+ * @throws InterruptedException if the current thread is not a
+ * member of a ForkJoinPool and was interrupted while waiting
+ * @throws TimeoutException if the wait timed out
*/
- public final void quietlyHelpJoin() {
- if (status >= 0) {
- ForkJoinWorkerThread w =
- (ForkJoinWorkerThread)(Thread.currentThread());
- if (!w.unpushTask(this) || !tryQuietlyInvoke())
- busyJoin(w);
+ public final V get(long timeout, TimeUnit unit)
+ throws InterruptedException, ExecutionException, TimeoutException {
+ if (Thread.interrupted())
+ throw new InterruptedException();
+ // Messy in part because we measure in nanosecs, but wait in millisecs
+ int s; long ns, ms;
+ if ((s = status) >= 0 && (ns = unit.toNanos(timeout)) > 0L) {
+ long deadline = System.nanoTime() + ns;
+ ForkJoinPool p = null;
+ ForkJoinPool.WorkQueue w = null;
+ Thread t = Thread.currentThread();
+ if (t instanceof ForkJoinWorkerThread) {
+ ForkJoinWorkerThread wt = (ForkJoinWorkerThread)t;
+ p = wt.pool;
+ w = wt.workQueue;
+ s = p.helpJoinOnce(w, this); // no retries on failure
+ }
+ boolean canBlock = false;
+ boolean interrupted = false;
+ try {
+ while ((s = status) >= 0) {
+ if (w != null && w.runState < 0)
+ cancelIgnoringExceptions(this);
+ else if (!canBlock) {
+ if (p == null || p.tryCompensate(this, null))
+ canBlock = true;
+ }
+ else {
+ if ((ms = TimeUnit.NANOSECONDS.toMillis(ns)) > 0L &&
+ U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
+ synchronized (this) {
+ if (status >= 0) {
+ try {
+ wait(ms);
+ } catch (InterruptedException ie) {
+ if (p == null)
+ interrupted = true;
+ }
+ }
+ else
+ notifyAll();
+ }
+ }
+ if ((s = status) < 0 || interrupted ||
+ (ns = deadline - System.nanoTime()) <= 0L)
+ break;
+ }
+ }
+ } finally {
+ if (p != null && canBlock)
+ p.incrementActiveCount();
+ }
+ if (interrupted)
+ throw new InterruptedException();
+ }
+ if ((s &= DONE_MASK) != NORMAL) {
+ Throwable ex;
+ if (s == CANCELLED)
+ throw new CancellationException();
+ if (s != EXCEPTIONAL)
+ throw new TimeoutException();
+ if ((ex = getThrowableException()) != null)
+ throw new ExecutionException(ex);
}
+ return getRawResult();
}
/**
- * Joins this task, without returning its result or throwing an
+ * Joins this task, without returning its result or throwing its
* exception. This method may be useful when processing
* collections of tasks when some have been cancelled or otherwise
* known to have aborted.
*/
public final void quietlyJoin() {
- if (status >= 0) {
- ForkJoinWorkerThread w = getWorker();
- if (w == null || !w.unpushTask(this) || !tryQuietlyInvoke())
- awaitDone(w, true);
- }
+ doJoin();
}
/**
* Commences performing this task and awaits its completion if
- * necessary, without returning its result or throwing an
- * exception. This method may be useful when processing
- * collections of tasks when some have been cancelled or otherwise
- * known to have aborted.
+ * necessary, without returning its result or throwing its
+ * exception.
*/
public final void quietlyInvoke() {
- if (status >= 0 && !tryQuietlyInvoke())
- quietlyJoin();
+ doInvoke();
}
/**
* Possibly executes tasks until the pool hosting the current task
- * {@link ForkJoinPool#isQuiescent}. This method may be of use in
- * designs in which many tasks are forked, but none are explicitly
- * joined, instead executing them until all are processed.
+ * {@link ForkJoinPool#isQuiescent is quiescent}. This method may
+ * be of use in designs in which many tasks are forked, but none
+ * are explicitly joined, instead executing them until all are
+ * processed.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
*/
public static void helpQuiesce() {
- ((ForkJoinWorkerThread)(Thread.currentThread())).
- helpQuiescePool();
+ ForkJoinWorkerThread wt =
+ (ForkJoinWorkerThread)Thread.currentThread();
+ wt.pool.helpQuiescePool(wt.workQueue);
}
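+
+    /*
+     * A sketch of the fork-many, join-none style this supports, reusing
+     * the hypothetical graph classes from the marking discussion in the
+     * class javadoc ('roots' is an assumed Collection<Node>):
+     *
+     *   protected void compute() {   // in a root RecursiveAction
+     *     for (Node r : roots)
+     *       r.task.fork();           // fork entry tasks, join none
+     *     helpQuiesce();             // run/steal tasks until the pool quiesces
+     *   }
+     */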
/**
* Resets the internal bookkeeping state of this task, allowing a
- * subsequent <code>fork</code>. This method allows repeated reuse of
+ * subsequent {@code fork}. This method allows repeated reuse of
* this task, but only if reuse occurs when this task has either
* never been forked, or has been forked, then completed and all
* outstanding joins of this task have also completed. Effects
- * under any other usage conditions are not guaranteed, and are
- * almost surely wrong. This method may be useful when executing
+ * under any other usage conditions are not guaranteed.
+ * This method may be useful when executing
* pre-constructed trees of subtasks in loops.
+ *
+ * <p>Upon completion of this method, {@code isDone()} reports
+ * {@code false}, and {@code getException()} reports {@code
+ * null}. However, the value returned by {@code getRawResult} is
+ * unaffected. To clear this value, you can invoke {@code
+ * setRawResult(null)}.
*/
public void reinitialize() {
- if ((status & COMPLETION_MASK) == EXCEPTIONAL)
- exceptionMap.remove(this);
- status = 0;
+ if ((status & DONE_MASK) == EXCEPTIONAL)
+ clearExceptionalCompletion();
+ else
+ status = 0;
}
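+
+    /*
+     * A sketch of such loop reuse (the pool, data, and trip count are
+     * illustrative; SumTask is the hypothetical class from the class
+     * javadoc's granularity example):
+     *
+     *   ForkJoinPool pool = new ForkJoinPool();
+     *   SumTask root = new SumTask(data, 0, data.length);
+     *   for (int i = 0; i < 3; ++i) {
+     *     long sum = pool.invoke(root); // completes, no outstanding joins
+     *     root.reinitialize();          // now legal to invoke/fork again
+     *   }
+     */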
/**
* Returns the pool hosting the current task execution, or null
- * if this task is executing outside of any pool.
- * @return the pool, or null if none.
+ * if this task is executing outside of any ForkJoinPool.
+ *
+ * @see #inForkJoinPool
+ * @return the pool, or {@code null} if none
*/
public static ForkJoinPool getPool() {
Thread t = Thread.currentThread();
- return ((t instanceof ForkJoinWorkerThread)?
- ((ForkJoinWorkerThread)t).pool : null);
+ return (t instanceof ForkJoinWorkerThread) ?
+ ((ForkJoinWorkerThread) t).pool : null;
+ }
+
+ /**
+ * Returns {@code true} if the current thread is a {@link
+ * ForkJoinWorkerThread} executing as a ForkJoinPool computation.
+ *
+ * @return {@code true} if the current thread is a {@link
+ * ForkJoinWorkerThread} executing as a ForkJoinPool computation,
+ * or {@code false} otherwise
+ */
+ public static boolean inForkJoinPool() {
+ return Thread.currentThread() instanceof ForkJoinWorkerThread;
}
/**
@@ -853,13 +1101,19 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* by the current thread, and has not commenced executing in
* another thread. This method may be useful when arranging
* alternative local processing of tasks that could have been, but
- * were not, stolen. This method may be invoked only from within
- * ForkJoinTask computations. Attempts to invoke in other contexts
- * result in exceptions or errors possibly including ClassCastException.
- * @return true if unforked
+ * were not, stolen.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
+ *
+ * @return {@code true} if unforked
*/
public boolean tryUnfork() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).unpushTask(this);
+ return ((ForkJoinWorkerThread)Thread.currentThread())
+ .workQueue.tryUnpush(this);
}
/**
@@ -867,15 +1121,22 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* forked by the current worker thread but not yet executed. This
* value may be useful for heuristic decisions about whether to
* fork other tasks.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
+ *
* @return the number of tasks
*/
public static int getQueuedTaskCount() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).
- getQueueSize();
+ return ((ForkJoinWorkerThread) Thread.currentThread())
+ .workQueue.queueSize();
}
/**
- * Returns a estimate of how many more locally queued tasks are
+ * Returns an estimate of how many more locally queued tasks are
* held by the current worker thread than there are other worker
* threads that might steal them. This value may be useful for
* heuristic decisions about whether to fork other tasks. In many
@@ -883,23 +1144,74 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* aim to maintain a small constant surplus (for example, 3) of
* tasks, and to process computations locally if this threshold is
* exceeded.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
+ *
* @return the surplus number of tasks, which may be negative
*/
public static int getSurplusQueuedTaskCount() {
- return ((ForkJoinWorkerThread)(Thread.currentThread()))
- .getEstimatedSurplusTaskCount();
+ /*
+ * The aim of this method is to return a cheap heuristic guide
+ * for task partitioning when programmers, frameworks, tools,
+ * or languages have little or no idea about task granularity.
+ * In essence by offering this method, we ask users only about
+ * tradeoffs in overhead vs expected throughput and its
+ * variance, rather than how finely to partition tasks.
+ *
+ * In a steady state strict (tree-structured) computation,
+ * each thread makes available for stealing enough tasks for
+ * other threads to remain active. Inductively, if all threads
+ * play by the same rules, each thread should make available
+ * only a constant number of tasks.
+ *
+ * The minimum useful constant is just 1. But using a value of
+ * 1 would require immediate replenishment upon each steal to
+ * maintain enough tasks, which is infeasible. Further,
+ * partitionings/granularities of offered tasks should
+ * minimize steal rates, which in general means that threads
+ * nearer the top of computation tree should generate more
+ * than those nearer the bottom. In perfect steady state, each
+ * thread is at approximately the same level of computation
+ * tree. However, producing extra tasks amortizes the
+ * uncertainty of progress and diffusion assumptions.
+ *
+ * So, users will want to use values larger, but not much
+ * larger than 1 to both smooth over transient shortages and
+ * hedge against uneven progress; as traded off against the
+ * cost of extra task overhead. We leave the user to pick a
+ * threshold value to compare with the results of this call to
+ * guide decisions, but recommend values such as 3.
+ *
+ * When all threads are active, it is on average OK to
+ * estimate surplus strictly locally. In steady-state, if one
+ * thread is maintaining say 2 surplus tasks, then so are
+ * others. So we can just use estimated queue length.
+ * However, this strategy alone leads to serious mis-estimates
+ * in some non-steady-state conditions (ramp-up, ramp-down,
+ * other stalls). We can detect many of these by further
+ * considering the number of "idle" threads, that are known to
+ * have zero queued tasks, so compensate by a factor of
+ * (#idle/#active) threads.
+ */
+ ForkJoinWorkerThread wt =
+ (ForkJoinWorkerThread)Thread.currentThread();
+ return wt.workQueue.queueSize() - wt.pool.idlePerActive();
}
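+
+    /*
+     * A sketch of the recommended pattern; canSplit, splitAndForkHalf,
+     * and processLocally are hypothetical stand-ins for task-specific
+     * logic, with 3 as the suggested surplus threshold:
+     *
+     *   while (canSplit() && getSurplusQueuedTaskCount() < 3)
+     *     splitAndForkHalf();  // push half the remaining work for stealing
+     *   processLocally();      // finish the remainder without forking
+     */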
// Extension methods
/**
- * Returns the result that would be returned by <code>join</code>,
- * even if this task completed abnormally, or null if this task is
- * not known to have been completed. This method is designed to
- * aid debugging, as well as to support extensions. Its use in any
- * other context is discouraged.
+ * Returns the result that would be returned by {@link #join}, even
+ * if this task completed abnormally, or {@code null} if this task
+ * is not known to have been completed. This method is designed
+ * to aid debugging, as well as to support extensions. Its use in
+ * any other context is discouraged.
*
- * @return the result, or null if not completed.
+ * @return the result, or {@code null} if not completed
*/
public abstract V getRawResult();
@@ -918,42 +1230,52 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* called otherwise. The return value controls whether this task
* is considered to be done normally. It may return false in
* asynchronous actions that require explicit invocations of
- * <code>complete</code> to become joinable. It may throw exceptions
- * to indicate abnormal exit.
- * @return true if completed normally
- * @throws Error or RuntimeException if encountered during computation
+ * {@link #complete} to become joinable. It may also throw an
+ * (unchecked) exception to indicate abnormal exit.
+ *
+ * @return {@code true} if completed normally
*/
protected abstract boolean exec();
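
As a hedged illustration of the return-false case just described, a sketch of an asynchronous action that only becomes joinable once complete is invoked explicitly (the callback wiring is assumed, not part of this patch):

    // Hypothetical sketch: exec() returns false, so join() blocks until
    // some external callback invokes complete() explicitly.
    class AsyncSignal extends ForkJoinTask<Void> {
        public Void getRawResult() { return null; }
        public void setRawResult(Void v) { }
        protected boolean exec() {
            startExternalWork();  // assumed: begins work that finishes elsewhere
            return false;         // not done; not joinable until complete(null)
        }
        void externalWorkFinished() { complete(null); }  // unblocks joiners
        private void startExternalWork() { /* assumed callback registration */ }
    }
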
/**
- * Returns, but does not unschedule or execute, the task queued by
- * the current thread but not yet executed, if one is
+ * Returns, but does not unschedule or execute, a task queued by
+ * the current thread but not yet executed, if one is immediately
* available. There is no guarantee that this task will actually
- * be polled or executed next. This method is designed primarily
- * to support extensions, and is unlikely to be useful otherwise.
- * This method may be invoked only from within ForkJoinTask
- * computations. Attempts to invoke in other contexts result in
- * exceptions or errors possibly including ClassCastException.
+ * be polled or executed next. Conversely, this method may return
+ * null even if a task exists but cannot be accessed without
+ * contention with other threads. This method is designed
+ * primarily to support extensions, and is unlikely to be useful
+ * otherwise.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
*
- * @return the next task, or null if none are available
+ * @return the next task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> peekNextLocalTask() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).peekTask();
+ return ((ForkJoinWorkerThread) Thread.currentThread()).workQueue.peek();
}
/**
* Unschedules and returns, without executing, the next task
* queued by the current thread but not yet executed. This method
* is designed primarily to support extensions, and is unlikely to
- * be useful otherwise. This method may be invoked only from
- * within ForkJoinTask computations. Attempts to invoke in other
- * contexts result in exceptions or errors possibly including
- * ClassCastException.
+ * be useful otherwise.
+ *
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
*
- * @return the next task, or null if none are available
+ * @return the next task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> pollNextLocalTask() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).pollLocalTask();
+ return ((ForkJoinWorkerThread) Thread.currentThread())
+ .workQueue.nextLocalTask();
}
/**
@@ -961,19 +1283,170 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* queued by the current thread but not yet executed, if one is
* available, or if not available, a task that was forked by some
* other thread, if available. Availability may be transient, so a
- * <code>null</code> result does not necessarily imply quiecence
+ * {@code null} result does not necessarily imply quiescence
* of the pool this task is operating in. This method is designed
* primarily to support extensions, and is unlikely to be useful
- * otherwise. This method may be invoked only from within
- * ForkJoinTask computations. Attempts to invoke in other contexts
- * result in exceptions or errors possibly including
- * ClassCastException.
+ * otherwise.
*
- * @return a task, or null if none are available
+ * <p>This method may be invoked only from within {@code
+ * ForkJoinPool} computations (as may be determined using method
+ * {@link #inForkJoinPool}). Attempts to invoke in other contexts
+ * result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
+ *
+ * @return a task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> pollTask() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).
- pollTask();
+ ForkJoinWorkerThread wt =
+ (ForkJoinWorkerThread)Thread.currentThread();
+ return wt.pool.nextTaskFor(wt.workQueue);
+ }
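
For illustration, one way an extension might use these protected statics from inside a ForkJoinTask subclass (a hypothetical sketch, not code from this patch):

    // Hypothetical sketch: drain locally queued subtasks before finishing,
    // using the protected static pollNextLocalTask().
    class DrainingAction extends ForkJoinTask<Void> {
        public Void getRawResult() { return null; }
        public void setRawResult(Void v) { }
        protected boolean exec() {
            ForkJoinTask<?> t;
            while ((t = pollNextLocalTask()) != null)
                t.invoke();       // run each unscheduled local task in turn
            return true;
        }
    }
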
+
+ // Mark-bit operations
+
+ /**
+ * Returns true if this task is marked.
+ *
+ * @return true if this task is marked
+ * @since 1.8
+ */
+ public final boolean isMarkedForkJoinTask() {
+ return (status & MARKED) != 0;
+ }
+
+ /**
+ * Atomically sets the mark on this task.
+ *
+ * @return true if this task was previously unmarked
+ * @since 1.8
+ */
+ public final boolean markForkJoinTask() {
+ for (int s;;) {
+ if (((s = status) & MARKED) != 0)
+ return false;
+ if (U.compareAndSwapInt(this, STATUS, s, s | MARKED))
+ return true;
+ }
+ }
+
+ /**
+ * Atomically clears the mark on this task.
+ *
+ * @return true if this task was previously marked
+ * @since 1.8
+ */
+ public final boolean unmarkForkJoinTask() {
+ for (int s;;) {
+ if (((s = status) & MARKED) == 0)
+ return false;
+ if (U.compareAndSwapInt(this, STATUS, s, s & ~MARKED))
+ return true;
+ }
+ }
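
One plausible use of these mark bits, sketched under the assumption of a task-graph traversal in which each node must be executed at most once (the visit helper is hypothetical):

    // Hypothetical sketch: first-caller-wins claiming via the mark bit.
    void visit(ForkJoinTask<?> node) {
        if (node.markForkJoinTask())  // true only for the first thread to mark
            node.invoke();            // safe: no other thread claimed this node
        // else: another thread already owns this node
    }
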
+
+ /**
+ * Adaptor for Runnables. This implements RunnableFuture
+ * to be compliant with AbstractExecutorService constraints
+ * when used in ForkJoinPool.
+ */
+ static final class AdaptedRunnable<T> extends ForkJoinTask<T>
+ implements RunnableFuture<T> {
+ final Runnable runnable;
+ T result;
+ AdaptedRunnable(Runnable runnable, T result) {
+ if (runnable == null) throw new NullPointerException();
+ this.runnable = runnable;
+ this.result = result; // OK to set this even before completion
+ }
+ public final T getRawResult() { return result; }
+ public final void setRawResult(T v) { result = v; }
+ public final boolean exec() { runnable.run(); return true; }
+ public final void run() { invoke(); }
+ private static final long serialVersionUID = 5232453952276885070L;
+ }
+
+ /**
+ * Adaptor for Runnables without results
+ */
+ static final class AdaptedRunnableAction extends ForkJoinTask<Void>
+ implements RunnableFuture<Void> {
+ final Runnable runnable;
+ AdaptedRunnableAction(Runnable runnable) {
+ if (runnable == null) throw new NullPointerException();
+ this.runnable = runnable;
+ }
+ public final Void getRawResult() { return null; }
+ public final void setRawResult(Void v) { }
+ public final boolean exec() { runnable.run(); return true; }
+ public final void run() { invoke(); }
+ private static final long serialVersionUID = 5232453952276885070L;
+ }
+
+ /**
+ * Adaptor for Callables
+ */
+ static final class AdaptedCallable<T> extends ForkJoinTask<T>
+ implements RunnableFuture<T> {
+ final Callable<? extends T> callable;
+ T result;
+ AdaptedCallable(Callable<? extends T> callable) {
+ if (callable == null) throw new NullPointerException();
+ this.callable = callable;
+ }
+ public final T getRawResult() { return result; }
+ public final void setRawResult(T v) { result = v; }
+ public final boolean exec() {
+ try {
+ result = callable.call();
+ return true;
+ } catch (Error err) {
+ throw err;
+ } catch (RuntimeException rex) {
+ throw rex;
+ } catch (Exception ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+ public final void run() { invoke(); }
+ private static final long serialVersionUID = 2838392045355241008L;
+ }
+
+ /**
+ * Returns a new {@code ForkJoinTask} that performs the {@code run}
+ * method of the given {@code Runnable} as its action, and returns
+ * a null result upon {@link #join}.
+ *
+ * @param runnable the runnable action
+ * @return the task
+ */
+ public static ForkJoinTask<?> adapt(Runnable runnable) {
+ return new AdaptedRunnableAction(runnable);
+ }
+
+ /**
+ * Returns a new {@code ForkJoinTask} that performs the {@code run}
+ * method of the given {@code Runnable} as its action, and returns
+ * the given result upon {@link #join}.
+ *
+ * @param runnable the runnable action
+ * @param result the result upon completion
+ * @return the task
+ */
+ public static <T> ForkJoinTask<T> adapt(Runnable runnable, T result) {
+ return new AdaptedRunnable<T>(runnable, result);
+ }
+
+ /**
+ * Returns a new {@code ForkJoinTask} that performs the {@code call}
+ * method of the given {@code Callable} as its action, and returns
+ * its result upon {@link #join}, translating any checked exceptions
+ * encountered into {@code RuntimeException}.
+ *
+ * @param callable the callable action
+ * @return the task
+ */
+ public static <T> ForkJoinTask<T> adapt(Callable<? extends T> callable) {
+ return new AdaptedCallable<T>(callable);
}
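
For example, a usage sketch for these adaptors (pool construction and the computation are assumed; Callable is java.util.concurrent.Callable):

    // Hypothetical sketch: running plain Callable work through adapt(...).
    static Integer demo() {
        ForkJoinPool pool = new ForkJoinPool();
        ForkJoinTask<Integer> task = ForkJoinTask.adapt(new Callable<Integer>() {
            public Integer call() { return 6 * 7; }  // a checked exception here
        });                                          // would become RuntimeException
        return pool.invoke(task);                    // joins and yields 42
    }
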
// Serialization support
@@ -981,11 +1454,10 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
private static final long serialVersionUID = -7721805057305804111L;
/**
- * Save the state to a stream.
+ * Saves this task to a stream (that is, serializes it).
*
* @serialData the current run status and the exception thrown
- * during execution, or null if none.
- * @param s the stream
+ * during execution, or {@code null} if none
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
@@ -994,70 +1466,57 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
/**
- * Reconstitute the instance from a stream.
- * @param s the stream
+ * Reconstitutes this task from a stream (that is, deserializes it).
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
- status &= ~INTERNAL_SIGNAL_MASK; // clear internal signal counts
- status |= EXTERNAL_SIGNAL; // conservatively set external signal
Object ex = s.readObject();
if (ex != null)
- setDoneExceptionally((Throwable)ex);
+ setExceptionalCompletion((Throwable)ex);
+ }
+
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe U;
+ private static final long STATUS;
+ static {
+ exceptionTableLock = new ReentrantLock();
+ exceptionTableRefQueue = new ReferenceQueue<Object>();
+ exceptionTable = new ExceptionNode[EXCEPTION_MAP_CAPACITY];
+ try {
+ U = getUnsafe();
+ STATUS = U.objectFieldOffset
+ (ForkJoinTask.class.getDeclaredField("status"));
+ } catch (Exception e) {
+ throw new Error(e);
+ }
}
- // Temporary Unsafe mechanics for preliminary release
- private static Unsafe getUnsafe() throws Throwable {
+ /**
+ * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
+ * Replace with a simple call to Unsafe.getUnsafe when integrating
+ * into a jdk.
+ *
+ * @return a sun.misc.Unsafe
+ */
+ private static sun.misc.Unsafe getUnsafe() {
try {
- return Unsafe.getUnsafe();
+ return sun.misc.Unsafe.getUnsafe();
} catch (SecurityException se) {
try {
return java.security.AccessController.doPrivileged
- (new java.security.PrivilegedExceptionAction<Unsafe>() {
- public Unsafe run() throws Exception {
- return getUnsafePrivileged();
+ (new java.security
+ .PrivilegedExceptionAction<sun.misc.Unsafe>() {
+ public sun.misc.Unsafe run() throws Exception {
+ java.lang.reflect.Field f = sun.misc
+ .Unsafe.class.getDeclaredField("theUnsafe");
+ f.setAccessible(true);
+ return (sun.misc.Unsafe) f.get(null);
}});
} catch (java.security.PrivilegedActionException e) {
- throw e.getCause();
+ throw new RuntimeException("Could not initialize intrinsics",
+ e.getCause());
}
}
}
-
- private static Unsafe getUnsafePrivileged()
- throws NoSuchFieldException, IllegalAccessException {
- Field f = Unsafe.class.getDeclaredField("theUnsafe");
- f.setAccessible(true);
- return (Unsafe) f.get(null);
- }
-
- private static long fieldOffset(String fieldName, Unsafe unsafe)
- throws NoSuchFieldException {
- // do not use _unsafe to avoid NPE
- return unsafe.objectFieldOffset
- (ForkJoinTask.class.getDeclaredField(fieldName));
- }
-
- static final Unsafe _unsafe;
- static final long statusOffset;
-
- static {
- Unsafe tmpUnsafe = null;
- long tmpStatusOffset = 0;
- try {
- tmpUnsafe = getUnsafe();
- tmpStatusOffset = fieldOffset("status", tmpUnsafe);
- } catch (Throwable e) {
- // Ignore the failure to load sun.misc.Unsafe on Android so
- // that platform can use the actor library without the
- // fork/join scheduler.
- String vmVendor = System.getProperty("java.vm.vendor");
- if (!vmVendor.contains("Android")) {
- throw new RuntimeException("Could not initialize intrinsics", e);
- }
- }
- _unsafe = tmpUnsafe;
- statusOffset = tmpStatusOffset;
- }
-
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
index b4d889750c..90a0af5723 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
@@ -1,224 +1,55 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
-import java.util.*;
-import java.util.concurrent.*;
-import java.util.concurrent.atomic.*;
-import java.util.concurrent.locks.*;
-import sun.misc.Unsafe;
-import java.lang.reflect.*;
/**
- * A thread managed by a {@link ForkJoinPool}. This class is
- * subclassable solely for the sake of adding functionality -- there
- * are no overridable methods dealing with scheduling or
- * execution. However, you can override initialization and termination
- * methods surrounding the main task processing loop. If you do
- * create such a subclass, you will also need to supply a custom
- * ForkJoinWorkerThreadFactory to use it in a ForkJoinPool.
+ * A thread managed by a {@link ForkJoinPool}, which executes
+ * {@link ForkJoinTask}s.
+ * This class is subclassable solely for the sake of adding
+ * functionality -- there are no overridable methods dealing with
+ * scheduling or execution. However, you can override initialization
+ * and termination methods surrounding the main task processing loop.
+ * If you do create such a subclass, you will also need to supply a
+ * custom {@link ForkJoinPool.ForkJoinWorkerThreadFactory} to use it
+ * in a {@code ForkJoinPool}.
*
+ * @since 1.7
+ * @author Doug Lea
*/
public class ForkJoinWorkerThread extends Thread {
/*
- * Algorithm overview:
- *
- * 1. Work-Stealing: Work-stealing queues are special forms of
- * Deques that support only three of the four possible
- * end-operations -- push, pop, and deq (aka steal), and only do
- * so under the constraints that push and pop are called only from
- * the owning thread, while deq may be called from other threads.
- * (If you are unfamiliar with them, you probably want to read
- * Herlihy and Shavit's book "The Art of Multiprocessor
- * programming", chapter 16 describing these in more detail before
- * Programming", chapter 16 describing these in more detail before
- * similar to "Dynamic Circular Work-Stealing Deque" by David
- * Chase and Yossi Lev, SPAA 2005
- * (http://research.sun.com/scalable/pubs/index.html). The main
- * difference ultimately stems from gc requirements that we null
- * out taken slots as soon as we can, to maintain as small a
- * footprint as possible even in programs generating huge numbers
- * of tasks. To accomplish this, we shift the CAS arbitrating pop
- * vs deq (steal) from being on the indices ("base" and "sp") to
- * the slots themselves (mainly via method "casSlotNull()"). So,
- * both a successful pop and deq mainly entail CAS'ing a nonnull
- * slot to null. Because we rely on CASes of references, we do
- * not need tag bits on base or sp. They are simple ints as used
- * in any circular array-based queue (see for example ArrayDeque).
- * Updates to the indices must still be ordered in a way that
- * guarantees that sp == base means the queue is empty, but
- * otherwise may err on the side of possibly making the queue
- * appear nonempty when a push, pop, or deq have not fully
- * committed. Note that this means that the deq operation,
- * considered individually, is not wait-free. One thief cannot
- * successfully continue until another in-progress one (or, if
- * previously empty, a push) completes. However, in the
- * aggregate, we ensure at least probabilistic non-blockingness. If
- * an attempted steal fails, a thief always chooses a different
- * random victim target to try next. So, in order for one thief to
- * progress, it suffices for any in-progress deq or new push on
- * any empty queue to complete. One reason this works well here is
- * that apparently-nonempty often means soon-to-be-stealable,
- * which gives threads a chance to activate if necessary before
- * stealing (see below).
- *
- * Efficient implementation of this approach currently relies on
- * an uncomfortable amount of "Unsafe" mechanics. To maintain
- * correct orderings, reads and writes of variable base require
- * volatile ordering. Variable sp does not require volatile write
- * but needs cheaper store-ordering on writes. Because they are
- * protected by volatile base reads, reads of the queue array and
- * its slots do not need volatile load semantics, but writes (in
- * push) require store order and CASes (in pop and deq) require
- * (volatile) CAS semantics. Since these combinations aren't
- * supported using ordinary volatiles, the only way to accomplish
- * these efficiently is to use direct Unsafe calls. (Using external
- * AtomicIntegers and AtomicReferenceArrays for the indices and
- * array is significantly slower because of memory locality and
- * indirection effects.) Further, performance on most platforms is
- * very sensitive to placement and sizing of the (resizable) queue
- * array. Even though these queues don't usually become all that
- * big, the initial size must be large enough to counteract cache
- * contention effects across multiple queues (especially in the
- * presence of GC cardmarking). Also, to improve thread-locality,
- * queues are currently initialized immediately after the thread
- * gets the initial signal to start processing tasks. However,
- * all queue-related methods except pushTask are written in a way
- * that allows them to instead be lazily allocated and/or disposed
- * of when empty. All together, these low-level implementation
- * choices produce as much as a factor of 4 performance
- * improvement compared to naive implementations, and enable the
- * processing of billions of tasks per second, sometimes at the
- * expense of ugliness.
- *
- * 2. Run control: The primary run control is based on a global
- * counter (activeCount) held by the pool. It uses an algorithm
- * similar to that in Herlihy and Shavit section 17.6 to cause
- * threads to eventually block when all threads declare they are
- * inactive. (See variable "scans".) For this to work, threads
- * must be declared active when executing tasks, and before
- * stealing a task. They must be inactive before blocking on the
- * Pool Barrier (awaiting a new submission or other Pool
- * event). In between, there is some free play which we take
- * advantage of to avoid contention and rapid flickering of the
- * global activeCount: If inactive, we activate only if a victim
- * queue appears to be nonempty (see above). Similarly, a thread
- * tries to inactivate only after a full scan of other threads.
- * The net effect is that contention on activeCount is rarely a
- * measurable performance issue. (There are also a few other cases
- * where we scan for work rather than retry/block upon
- * contention.)
- *
- * 3. Selection control. We maintain policy of always choosing to
- * run local tasks rather than stealing, and always trying to
- * steal tasks before trying to run a new submission. All steals
- * are currently performed in randomly-chosen deq-order. It may be
- * worthwhile to bias these with locality / anti-locality
- * information, but doing this well probably requires more
- * lower-level information from JVMs than currently provided.
- */
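
To make the slot-CAS idea in the comment above concrete, a compact toy deque using the public atomic API (fixed capacity, no resizing, simplified memory ordering; an illustrative sketch, not the code being removed):

    // Hypothetical sketch: pop and steal race by CASing a slot to null,
    // shifting arbitration from the indices to the slots themselves.
    import java.util.concurrent.atomic.AtomicReferenceArray;

    class TinyWorkDeque<T> {
        final AtomicReferenceArray<T> slots = new AtomicReferenceArray<T>(1 << 13);
        volatile int base;   // next slot to steal from
        volatile int sp;     // next slot to push to (owner thread only)

        void push(T t) {                        // owner only
            slots.set(sp & (slots.length() - 1), t);
            sp = sp + 1;                        // volatile write publishes the task
        }
        T pop() {                               // owner only
            int s = sp;
            if (s != base) {
                int i = (s - 1) & (slots.length() - 1);
                T t = slots.get(i);
                if (t != null && slots.compareAndSet(i, t, null)) {
                    sp = s - 1;
                    return t;                   // won the race against any thief
                }
            }
            return null;                        // empty, or a thief got there first
        }
        T steal() {                             // any other thread
            int b = base;
            if (sp != b) {
                int i = b & (slots.length() - 1);
                T t = slots.get(i);
                if (t != null && slots.compareAndSet(i, t, null)) {
                    base = b + 1;
                    return t;                   // successful deq ("steal")
                }
            }
            return null;                        // empty or contended; retry elsewhere
        }
    }
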
-
- /**
- * Capacity of work-stealing queue array upon initialization.
- * Must be a power of two. Initial size must be at least 2, but is
- * padded to minimize cache effects.
- */
- private static final int INITIAL_QUEUE_CAPACITY = 1 << 13;
-
- /**
- * Maximum work-stealing queue array size. Must be less than or
- * equal to 1 << 28 to ensure lack of index wraparound. (This
- * is less than usual bounds, because we need leftshift by 3
- * to be in int range).
- */
- private static final int MAXIMUM_QUEUE_CAPACITY = 1 << 28;
-
- /**
- * The pool this thread works in. Accessed directly by ForkJoinTask
- */
- final ForkJoinPool pool;
-
- /**
- * The work-stealing queue array. Size must be a power of two.
- * Initialized when thread starts, to improve memory locality.
- */
- private ForkJoinTask<?>[] queue;
-
- /**
- * Index (mod queue.length) of next queue slot to push to or pop
- * from. It is written only by owner thread, via ordered store.
- * Both sp and base are allowed to wrap around on overflow, but
- * (sp - base) still estimates size.
- */
- private volatile int sp;
-
- /**
- * Index (mod queue.length) of least valid queue slot, which is
- * always the next position to steal from if nonempty.
- */
- private volatile int base;
-
- /**
- * Activity status. When true, this worker is considered active.
- * Must be false upon construction. It must be true when executing
- * tasks, and BEFORE stealing a task. It must be false before
- * calling pool.sync
- */
- private boolean active;
-
- /**
- * Run state of this worker. Supports simple versions of the usual
- * shutdown/shutdownNow control.
- */
- private volatile int runState;
-
- /**
- * Seed for random number generator for choosing steal victims.
- * Uses Marsaglia xorshift. Must be nonzero upon initialization.
- */
- private int seed;
-
- /**
- * Number of steals, transferred to pool when idle
+ * ForkJoinWorkerThreads are managed by ForkJoinPools and perform
+ * ForkJoinTasks. For explanation, see the internal documentation
+ * of class ForkJoinPool.
*/
- private int stealCount;
- /**
- * Index of this worker in pool array. Set once by pool before
- * running, and accessed directly by pool during cleanup etc
- */
- int poolIndex;
-
- /**
- * The last barrier event waited for. Accessed in pool callback
- * methods, but only by current thread.
- */
- long lastEventCount;
-
- /**
- * True if use local fifo, not default lifo, for local polling
- */
- private boolean locallyFifo;
+ final ForkJoinPool.WorkQueue workQueue; // Work-stealing mechanics
+ final ForkJoinPool pool; // the pool this thread works in
/**
* Creates a ForkJoinWorkerThread operating in the given pool.
+ *
* @param pool the pool this thread works in
* @throws NullPointerException if pool is null
*/
protected ForkJoinWorkerThread(ForkJoinPool pool) {
- if (pool == null) throw new NullPointerException();
+ super(pool.nextWorkerName());
+ setDaemon(true);
+ Thread.UncaughtExceptionHandler ueh = pool.ueh;
+ if (ueh != null)
+ setUncaughtExceptionHandler(ueh);
this.pool = pool;
- // Note: poolIndex is set by pool during construction
- // Remaining initialization is deferred to onStart
+ pool.registerWorker(this.workQueue = new ForkJoinPool.WorkQueue
+ (pool, this, pool.localMode));
}
- // Public access methods
-
/**
- * Returns the pool hosting this thread
+ * Returns the pool hosting this thread.
+ *
* @return the pool
*/
public ForkJoinPool getPool() {
@@ -231,543 +62,58 @@ public class ForkJoinWorkerThread extends Thread {
* threads (minus one) that have ever been created in the pool.
* This method may be useful for applications that track status or
* collect results per-worker rather than per-task.
- * @return the index number.
+ *
+ * @return the index number
*/
public int getPoolIndex() {
- return poolIndex;
- }
-
- /**
- * Establishes local first-in-first-out scheduling mode for forked
- * tasks that are never joined.
- * @param async if true, use locally FIFO scheduling
- */
- void setAsyncMode(boolean async) {
- locallyFifo = async;
- }
-
- // Runstate management
-
- // Runstate values. Order matters
- private static final int RUNNING = 0;
- private static final int SHUTDOWN = 1;
- private static final int TERMINATING = 2;
- private static final int TERMINATED = 3;
-
- final boolean isShutdown() { return runState >= SHUTDOWN; }
- final boolean isTerminating() { return runState >= TERMINATING; }
- final boolean isTerminated() { return runState == TERMINATED; }
- final boolean shutdown() { return transitionRunStateTo(SHUTDOWN); }
- final boolean shutdownNow() { return transitionRunStateTo(TERMINATING); }
-
- /**
- * Transition to at least the given state. Return true if not
- * already at least given state.
- */
- private boolean transitionRunStateTo(int state) {
- for (;;) {
- int s = runState;
- if (s >= state)
- return false;
- if (_unsafe.compareAndSwapInt(this, runStateOffset, s, state))
- return true;
- }
- }
-
- /**
- * Try to set status to active; fail on contention
- */
- private boolean tryActivate() {
- if (!active) {
- if (!pool.tryIncrementActiveCount())
- return false;
- active = true;
- }
- return true;
- }
-
- /**
- * Try to set status to inactive; fail on contention
- */
- private boolean tryInactivate() {
- if (active) {
- if (!pool.tryDecrementActiveCount())
- return false;
- active = false;
- }
- return true;
- }
-
- /**
- * Computes next value for random victim probe. Scans don't
- * require a very high quality generator, but also not a crummy
- * one. Marsaglia xor-shift is cheap and works well.
- */
- private static int xorShift(int r) {
- r ^= r << 1;
- r ^= r >>> 3;
- r ^= r << 10;
- return r;
- }
-
- // Lifecycle methods
-
- /**
- * This method is required to be public, but should never be
- * called explicitly. It performs the main run loop to execute
- * ForkJoinTasks.
- */
- public void run() {
- Throwable exception = null;
- try {
- onStart();
- pool.sync(this); // await first pool event
- mainLoop();
- } catch (Throwable ex) {
- exception = ex;
- } finally {
- onTermination(exception);
- }
- }
-
- /**
- * Execute tasks until shut down.
- */
- private void mainLoop() {
- while (!isShutdown()) {
- ForkJoinTask<?> t = pollTask();
- if (t != null || (t = pollSubmission()) != null)
- t.quietlyExec();
- else if (tryInactivate())
- pool.sync(this);
- }
+ return workQueue.poolIndex;
}
/**
* Initializes internal state after construction but before
* processing any tasks. If you override this method, you must
- * invoke super.onStart() at the beginning of the method.
+ * invoke {@code super.onStart()} at the beginning of the method.
* Initialization requires care: Most fields must have legal
* default values, to ensure that attempted accesses from other
* threads work correctly even before this thread starts
* processing tasks.
*/
protected void onStart() {
- // Allocate while starting to improve chances of thread-local
- // isolation
- queue = new ForkJoinTask<?>[INITIAL_QUEUE_CAPACITY];
- // Initial value of seed need not be especially random but
- // should differ across workers and must be nonzero
- int p = poolIndex + 1;
- seed = p + (p << 8) + (p << 16) + (p << 24); // spread bits
}
/**
- * Perform cleanup associated with termination of this worker
+ * Performs cleanup associated with termination of this worker
* thread. If you override this method, you must invoke
- * super.onTermination at the end of the overridden method.
+ * {@code super.onTermination} at the end of the overridden method.
*
* @param exception the exception causing this thread to abort due
- * to an unrecoverable error, or null if completed normally.
+ * to an unrecoverable error, or {@code null} if completed normally
*/
protected void onTermination(Throwable exception) {
- // Execute remaining local tasks unless aborting or terminating
- while (exception == null && !pool.isTerminating() && base != sp) {
- try {
- ForkJoinTask<?> t = popTask();
- if (t != null)
- t.quietlyExec();
- } catch(Throwable ex) {
- exception = ex;
- }
- }
- // Cancel other tasks, transition status, notify pool, and
- // propagate exception to uncaught exception handler
- try {
- do;while (!tryInactivate()); // ensure inactive
- cancelTasks();
- runState = TERMINATED;
- pool.workerTerminated(this);
- } catch (Throwable ex) { // Shouldn't ever happen
- if (exception == null) // but if so, at least rethrown
- exception = ex;
- } finally {
- if (exception != null)
- ForkJoinTask.rethrowException(exception);
- }
}
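
Since the class documentation requires a matching factory for any subclass, a minimal sketch of the pair (the LoggingWorker name is hypothetical):

    // Hypothetical sketch: a worker subclass hooking onStart/onTermination,
    // installed through a custom ForkJoinWorkerThreadFactory.
    class LoggingWorker extends ForkJoinWorkerThread {
        LoggingWorker(ForkJoinPool pool) { super(pool); }
        protected void onStart() {
            super.onStart();                    // must be invoked first
            System.out.println(getName() + " starting");
        }
        protected void onTermination(Throwable exception) {
            System.out.println(getName() + " exiting: " + exception);
            super.onTermination(exception);     // must be invoked last
        }
    }

    static final ForkJoinPool.ForkJoinWorkerThreadFactory FACTORY =
        new ForkJoinPool.ForkJoinWorkerThreadFactory() {
            public ForkJoinWorkerThread newThread(ForkJoinPool pool) {
                return new LoggingWorker(pool);
            }
        };  // pass to a ForkJoinPool constructor that accepts a factory
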
- // Intrinsics-based support for queue operations.
-
/**
- * Adds in store-order the given task at the given slot of q.
- * Caller must ensure q is nonnull and index is in range.
- */
- private static void setSlot(ForkJoinTask<?>[] q, int i,
- ForkJoinTask<?> t){
- _unsafe.putOrderedObject(q, (i << qShift) + qBase, t);
- }
-
- /**
- * CAS given slot of q to null. Caller must ensure q is nonnull
- * and index is in range.
- */
- private static boolean casSlotNull(ForkJoinTask<?>[] q, int i,
- ForkJoinTask<?> t) {
- return _unsafe.compareAndSwapObject(q, (i << qShift) + qBase, t, null);
- }
-
- /**
- * Sets sp in store-order.
- */
- private void storeSp(int s) {
- _unsafe.putOrderedInt(this, spOffset, s);
- }
-
- // Main queue methods
-
- /**
- * Pushes a task. Called only by current thread.
- * @param t the task. Caller must ensure nonnull
- */
- final void pushTask(ForkJoinTask<?> t) {
- ForkJoinTask<?>[] q = queue;
- int mask = q.length - 1;
- int s = sp;
- setSlot(q, s & mask, t);
- storeSp(++s);
- if ((s -= base) == 1)
- pool.signalWork();
- else if (s >= mask)
- growQueue();
- }
-
- /**
- * Tries to take a task from the base of the queue, failing if
- * either empty or contended.
- * @return a task, or null if none or contended.
- */
- final ForkJoinTask<?> deqTask() {
- ForkJoinTask<?> t;
- ForkJoinTask<?>[] q;
- int i;
- int b;
- if (sp != (b = base) &&
- (q = queue) != null && // must read q after b
- (t = q[i = (q.length - 1) & b]) != null &&
- casSlotNull(q, i, t)) {
- base = b + 1;
- return t;
- }
- return null;
- }
-
- /**
- * Returns a popped task, or null if empty. Ensures active status
- * if nonnull. Called only by current thread.
- */
- final ForkJoinTask<?> popTask() {
- int s = sp;
- while (s != base) {
- if (tryActivate()) {
- ForkJoinTask<?>[] q = queue;
- int mask = q.length - 1;
- int i = (s - 1) & mask;
- ForkJoinTask<?> t = q[i];
- if (t == null || !casSlotNull(q, i, t))
- break;
- storeSp(s - 1);
- return t;
- }
- }
- return null;
- }
-
- /**
- * Specialized version of popTask to pop only if
- * topmost element is the given task. Called only
- * by current thread while active.
- * @param t the task. Caller must ensure nonnull
- */
- final boolean unpushTask(ForkJoinTask<?> t) {
- ForkJoinTask<?>[] q = queue;
- int mask = q.length - 1;
- int s = sp - 1;
- if (casSlotNull(q, s & mask, t)) {
- storeSp(s);
- return true;
- }
- return false;
- }
-
- /**
- * Returns next task.
- */
- final ForkJoinTask<?> peekTask() {
- ForkJoinTask<?>[] q = queue;
- if (q == null)
- return null;
- int mask = q.length - 1;
- int i = locallyFifo? base : (sp - 1);
- return q[i & mask];
- }
-
- /**
- * Doubles queue array size. Transfers elements by emulating
- * steals (deqs) from old array and placing, oldest first, into
- * new array.
- */
- private void growQueue() {
- ForkJoinTask<?>[] oldQ = queue;
- int oldSize = oldQ.length;
- int newSize = oldSize << 1;
- if (newSize > MAXIMUM_QUEUE_CAPACITY)
- throw new RejectedExecutionException("Queue capacity exceeded");
- ForkJoinTask<?>[] newQ = queue = new ForkJoinTask<?>[newSize];
-
- int b = base;
- int bf = b + oldSize;
- int oldMask = oldSize - 1;
- int newMask = newSize - 1;
- do {
- int oldIndex = b & oldMask;
- ForkJoinTask<?> t = oldQ[oldIndex];
- if (t != null && !casSlotNull(oldQ, oldIndex, t))
- t = null;
- setSlot(newQ, b & newMask, t);
- } while (++b != bf);
- pool.signalWork();
- }
-
- /**
- * Tries to steal a task from another worker. Starts at a random
- * index of workers array, and probes workers until finding one
- * with non-empty queue or finding that all are empty. It
- * randomly selects the first n probes. If these are empty, it
- * resorts to a full circular traversal, which is necessary to
- * accurately set active status by caller. Also restarts if pool
- * events occurred since last scan, which forces refresh of
- * workers array, in case barrier was associated with resize.
- *
- * This method must be both fast and quiet -- usually avoiding
- * memory accesses that could disrupt cache sharing etc other than
- * those needed to check for and take tasks. This accounts for,
- * among other things, updating random seed in place without
- * storing it until exit.
- *
- * @return a task, or null if none found
- */
- private ForkJoinTask<?> scan() {
- ForkJoinTask<?> t = null;
- int r = seed; // extract once to keep scan quiet
- ForkJoinWorkerThread[] ws; // refreshed on outer loop
- int mask; // must be power 2 minus 1 and > 0
- outer:do {
- if ((ws = pool.workers) != null && (mask = ws.length - 1) > 0) {
- int idx = r;
- int probes = ~mask; // use random index while negative
- for (;;) {
- r = xorShift(r); // update random seed
- ForkJoinWorkerThread v = ws[mask & idx];
- if (v == null || v.sp == v.base) {
- if (probes <= mask)
- idx = (probes++ < 0)? r : (idx + 1);
- else
- break;
- }
- else if (!tryActivate() || (t = v.deqTask()) == null)
- continue outer; // restart on contention
- else
- break outer;
- }
- }
- } while (pool.hasNewSyncEvent(this)); // retry on pool events
- seed = r;
- return t;
- }
-
- /**
- * Gets and removes a local or stolen task
- * @return a task, if available
- */
- final ForkJoinTask<?> pollTask() {
- ForkJoinTask<?> t = locallyFifo? deqTask() : popTask();
- if (t == null && (t = scan()) != null)
- ++stealCount;
- return t;
- }
-
- /**
- * Gets a local task
- * @return a task, if available
- */
- final ForkJoinTask<?> pollLocalTask() {
- return locallyFifo? deqTask() : popTask();
- }
-
- /**
- * Returns a pool submission, if one exists, activating first.
- * @return a submission, if available
- */
- private ForkJoinTask<?> pollSubmission() {
- ForkJoinPool p = pool;
- while (p.hasQueuedSubmissions()) {
- ForkJoinTask<?> t;
- if (tryActivate() && (t = p.pollSubmission()) != null)
- return t;
- }
- return null;
- }
-
- // Methods accessed only by Pool
-
- /**
- * Removes and cancels all tasks in queue. Can be called from any
- * thread.
- */
- final void cancelTasks() {
- ForkJoinTask<?> t;
- while (base != sp && (t = deqTask()) != null)
- t.cancelIgnoringExceptions();
- }
-
- /**
- * Drains tasks to given collection c
- * @return the number of tasks drained
- */
- final int drainTasksTo(Collection<ForkJoinTask<?>> c) {
- int n = 0;
- ForkJoinTask<?> t;
- while (base != sp && (t = deqTask()) != null) {
- c.add(t);
- ++n;
- }
- return n;
- }
-
- /**
- * Get and clear steal count for accumulation by pool. Called
- * only when known to be idle (in pool.sync and termination).
- */
- final int getAndClearStealCount() {
- int sc = stealCount;
- stealCount = 0;
- return sc;
- }
-
- /**
- * Returns true if at least one worker in the given array appears
- * to have at least one queued task.
- * @param ws array of workers
- */
- static boolean hasQueuedTasks(ForkJoinWorkerThread[] ws) {
- if (ws != null) {
- int len = ws.length;
- for (int j = 0; j < 2; ++j) { // need two passes for clean sweep
- for (int i = 0; i < len; ++i) {
- ForkJoinWorkerThread w = ws[i];
- if (w != null && w.sp != w.base)
- return true;
- }
- }
- }
- return false;
- }
-
- // Support methods for ForkJoinTask
-
- /**
- * Returns an estimate of the number of tasks in the queue.
- */
- final int getQueueSize() {
- int n = sp - base;
- return n < 0? 0 : n; // suppress momentarily negative values
- }
-
- /**
- * Returns an estimate of the number of tasks, offset by a
- * function of number of idle workers.
- */
- final int getEstimatedSurplusTaskCount() {
- // The halving approximates weighting idle vs non-idle workers
- return (sp - base) - (pool.getIdleThreadCount() >>> 1);
- }
-
- /**
- * Scan, returning early if joinMe done
- */
- final ForkJoinTask<?> scanWhileJoining(ForkJoinTask<?> joinMe) {
- ForkJoinTask<?> t = pollTask();
- if (t != null && joinMe.status < 0 && sp == base) {
- pushTask(t); // unsteal if done and this task would be stealable
- t = null;
- }
- return t;
- }
-
- /**
- * Runs tasks until pool isQuiescent
+ * This method is required to be public, but should never be
+ * called explicitly. It performs the main run loop to execute
+ * {@link ForkJoinTask}s.
*/
- final void helpQuiescePool() {
- for (;;) {
- ForkJoinTask<?> t = pollTask();
- if (t != null)
- t.quietlyExec();
- else if (tryInactivate() && pool.isQuiescent())
- break;
- }
- do;while (!tryActivate()); // re-activate on exit
- }
-
- // Temporary Unsafe mechanics for preliminary release
- private static Unsafe getUnsafe() throws Throwable {
+ public void run() {
+ Throwable exception = null;
try {
- return Unsafe.getUnsafe();
- } catch (SecurityException se) {
+ onStart();
+ pool.runWorker(workQueue);
+ } catch (Throwable ex) {
+ exception = ex;
+ } finally {
try {
- return java.security.AccessController.doPrivileged
- (new java.security.PrivilegedExceptionAction<Unsafe>() {
- public Unsafe run() throws Exception {
- return getUnsafePrivileged();
- }});
- } catch (java.security.PrivilegedActionException e) {
- throw e.getCause();
+ onTermination(exception);
+ } catch (Throwable ex) {
+ if (exception == null)
+ exception = ex;
+ } finally {
+ pool.deregisterWorker(this, exception);
}
}
}
-
- private static Unsafe getUnsafePrivileged()
- throws NoSuchFieldException, IllegalAccessException {
- Field f = Unsafe.class.getDeclaredField("theUnsafe");
- f.setAccessible(true);
- return (Unsafe) f.get(null);
- }
-
- private static long fieldOffset(String fieldName)
- throws NoSuchFieldException {
- return _unsafe.objectFieldOffset
- (ForkJoinWorkerThread.class.getDeclaredField(fieldName));
- }
-
- static final Unsafe _unsafe;
- static final long baseOffset;
- static final long spOffset;
- static final long runStateOffset;
- static final long qBase;
- static final int qShift;
- static {
- try {
- _unsafe = getUnsafe();
- baseOffset = fieldOffset("base");
- spOffset = fieldOffset("sp");
- runStateOffset = fieldOffset("runState");
- qBase = _unsafe.arrayBaseOffset(ForkJoinTask[].class);
- int s = _unsafe.arrayIndexScale(ForkJoinTask[].class);
- if ((s & (s-1)) != 0)
- throw new Error("data type scale not a power of two");
- qShift = 31 - Integer.numberOfLeadingZeros(s);
- } catch (Throwable e) {
- throw new RuntimeException("Could not initialize intrinsics", e);
- }
- }
}
+
diff --git a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
index 3b46c176ff..ceeb9212d5 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
@@ -1,30 +1,38 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
-import java.util.concurrent.*;
-import java.util.concurrent.locks.*;
-import java.util.concurrent.atomic.*;
-import java.util.*;
-import java.io.*;
-import sun.misc.Unsafe;
-import java.lang.reflect.*;
+
+import java.util.AbstractQueue;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import java.util.Queue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.LockSupport;
/**
- * An unbounded {@linkplain TransferQueue} based on linked nodes.
+ * An unbounded {@link TransferQueue} based on linked nodes.
* This queue orders elements FIFO (first-in-first-out) with respect
* to any given producer. The <em>head</em> of the queue is that
* element that has been on the queue the longest time for some
* producer. The <em>tail</em> of the queue is that element that has
* been on the queue the shortest time for some producer.
*
- * <p>Beware that, unlike in most collections, the {@code size}
- * method is <em>NOT</em> a constant-time operation. Because of the
+ * <p>Beware that, unlike in most collections, the {@code size} method
+ * is <em>NOT</em> a constant-time operation. Because of the
* asynchronous nature of these queues, determining the current number
- * of elements requires a traversal of the elements.
+ * of elements requires a traversal of the elements, and so may report
+ * inaccurate results if this collection is modified during traversal.
+ * Additionally, the bulk operations {@code addAll},
+ * {@code removeAll}, {@code retainAll}, {@code containsAll},
+ * {@code equals}, and {@code toArray} are <em>not</em> guaranteed
+ * to be performed atomically. For example, an iterator operating
+ * concurrently with an {@code addAll} operation might view only some
+ * of the added elements.
*
* <p>This class and its iterator implement all of the
* <em>optional</em> methods of the {@link Collection} and {@link
@@ -44,381 +52,938 @@ import java.lang.reflect.*;
* @since 1.7
* @author Doug Lea
* @param <E> the type of elements held in this collection
- *
*/
public class LinkedTransferQueue<E> extends AbstractQueue<E>
implements TransferQueue<E>, java.io.Serializable {
private static final long serialVersionUID = -3223113410248163686L;
/*
- * This class extends the approach used in FIFO-mode
- * SynchronousQueues. See the internal documentation, as well as
- * the PPoPP 2006 paper "Scalable Synchronous Queues" by Scherer,
- * Lea & Scott
- * (http://www.cs.rice.edu/~wns1/papers/2006-PPoPP-SQ.pdf)
+ * *** Overview of Dual Queues with Slack ***
+ *
+ * Dual Queues, introduced by Scherer and Scott
+ * (http://www.cs.rice.edu/~wns1/papers/2004-DISC-DDS.pdf) are
+ * (linked) queues in which nodes may represent either data or
+ * requests. When a thread tries to enqueue a data node, but
+ * encounters a request node, it instead "matches" and removes it;
+ * and vice versa for enqueuing requests. Blocking Dual Queues
+ * arrange that threads enqueuing unmatched requests block until
+ * other threads provide the match. Dual Synchronous Queues (see
+ * Scherer, Lea, & Scott
+ * http://www.cs.rochester.edu/u/scott/papers/2009_Scherer_CACM_SSQ.pdf)
+ * additionally arrange that threads enqueuing unmatched data also
+ * block. Dual Transfer Queues support all of these modes, as
+ * dictated by callers.
+ *
+ * A FIFO dual queue may be implemented using a variation of the
+ * Michael & Scott (M&S) lock-free queue algorithm
+ * (http://www.cs.rochester.edu/u/scott/papers/1996_PODC_queues.pdf).
+ * It maintains two pointer fields, "head", pointing to a
+ * (matched) node that in turn points to the first actual
+ * (unmatched) queue node (or null if empty); and "tail" that
+ * points to the last node on the queue (or again null if
+ * empty). For example, here is a possible queue with four data
+ * elements:
+ *
+ * head tail
+ * | |
+ * v v
+ * M -> U -> U -> U -> U
+ *
+ * The M&S queue algorithm is known to be prone to scalability and
+ * overhead limitations when maintaining (via CAS) these head and
+ * tail pointers. This has led to the development of
+ * contention-reducing variants such as elimination arrays (see
+ * Moir et al http://portal.acm.org/citation.cfm?id=1074013) and
+ * optimistic back pointers (see Ladan-Mozes & Shavit
+ * http://people.csail.mit.edu/edya/publications/OptimisticFIFOQueue-journal.pdf).
+ * However, the nature of dual queues enables a simpler tactic for
+ * improving M&S-style implementations when dual-ness is needed.
+ *
+ * In a dual queue, each node must atomically maintain its match
+ * status. While there are other possible variants, we implement
+ * this here as: for a data-mode node, matching entails CASing an
+ * "item" field from a non-null data value to null upon match, and
+ * vice-versa for request nodes, CASing from null to a data
+ * value. (Note that the linearization properties of this style of
+ * queue are easy to verify -- elements are made available by
+ * linking, and unavailable by matching.) Compared to plain M&S
+ * queues, this property of dual queues requires one additional
+ * successful atomic operation per enq/deq pair. But it also
+ * enables lower cost variants of queue maintenance mechanics. (A
+ * variation of this idea applies even for non-dual queues that
+ * support deletion of interior elements, such as
+ * j.u.c.ConcurrentLinkedQueue.)
+ *
+ * Once a node is matched, its match status can never again
+ * change. We may thus arrange that the linked list of them
+ * contain a prefix of zero or more matched nodes, followed by a
+ * suffix of zero or more unmatched nodes. (Note that we allow
+ * both the prefix and suffix to be zero length, which in turn
+ * means that we do not use a dummy header.) If we were not
+ * concerned with either time or space efficiency, we could
+ * correctly perform enqueue and dequeue operations by traversing
+ * from a pointer to the initial node; CASing the item of the
+ * first unmatched node on match and CASing the next field of the
+ * trailing node on appends. (Plus some special-casing when
+ * initially empty). While this would be a terrible idea in
+ * itself, it does have the benefit of not requiring ANY atomic
+ * updates on head/tail fields.
+ *
+ * We introduce here an approach that lies between the extremes of
+ * never versus always updating queue (head and tail) pointers.
+ * This offers a tradeoff between sometimes requiring extra
+ * traversal steps to locate the first and/or last unmatched
+ * nodes, versus the reduced overhead and contention of fewer
+ * updates to queue pointers. For example, a possible snapshot of
+ * a queue is:
+ *
+ * head tail
+ * | |
+ * v v
+ * M -> M -> U -> U -> U -> U
+ *
+ * The best value for this "slack" (the targeted maximum distance
+ * between the value of "head" and the first unmatched node, and
+ * similarly for "tail") is an empirical matter. We have found
+ * that using very small constants in the range of 1-3 works best
+ * over a range of platforms. Larger values introduce increasing
+ * costs of cache misses and risks of long traversal chains, while
+ * smaller values increase CAS contention and overhead.
+ *
+ * Dual queues with slack differ from plain M&S dual queues by
+ * virtue of only sometimes updating head or tail pointers when
+ * matching, appending, or even traversing nodes; in order to
+ * maintain a targeted slack. The idea of "sometimes" may be
+ * operationalized in several ways. The simplest is to use a
+ * per-operation counter incremented on each traversal step, and
+ * to try (via CAS) to update the associated queue pointer
+ * whenever the count exceeds a threshold. Another, that requires
+ * more overhead, is to use random number generators to update
+ * with a given probability per traversal step.
+ *
+ * In any strategy along these lines, because CASes updating
+ * fields may fail, the actual slack may exceed targeted
+ * slack. However, they may be retried at any time to maintain
+ * targets. Even when using very small slack values, this
+ * approach works well for dual queues because it allows all
+ * operations up to the point of matching or appending an item
+ * (hence potentially allowing progress by another thread) to be
+ * read-only, thus not introducing any further contention. As
+ * described below, we implement this by performing slack
+ * maintenance retries only after these points.
+ *
+ * As an accompaniment to such techniques, traversal overhead can
+ * be further reduced without increasing contention of head
+ * pointer updates: Threads may sometimes shortcut the "next" link
+ * path from the current "head" node to be closer to the currently
+ * known first unmatched node, and similarly for tail. Again, this
+ * may be triggered using thresholds or randomization.
+ *
+ * These ideas must be further extended to avoid unbounded amounts
+ * of costly-to-reclaim garbage caused by the sequential "next"
+ * links of nodes starting at old forgotten head nodes: As first
+ * described in detail by Boehm
+ * (http://portal.acm.org/citation.cfm?doid=503272.503282) if a GC
+ * delays noticing that any arbitrarily old node has become
+ * garbage, all newer dead nodes will also be unreclaimed.
+ * (Similar issues arise in non-GC environments.) To cope with
+ * this in our implementation, upon CASing to advance the head
+ * pointer, we set the "next" link of the previous head to point
+ * only to itself; thus limiting the length of connected dead lists.
+ * (We also take similar care to wipe out possibly garbage
+ * retaining values held in other Node fields.) However, doing so
+ * adds some further complexity to traversal: If any "next"
+ * pointer links to itself, it indicates that the current thread
+ * has lagged behind a head-update, and so the traversal must
+ * continue from the "head". Traversals trying to find the
+ * current tail starting from "tail" may also encounter
+ * self-links, in which case they also continue at "head".
+ *
+ * It is tempting in a slack-based scheme to not even use CAS for
+ * updates (similarly to Ladan-Mozes & Shavit). However, this
+ * cannot be done for head updates under the above link-forgetting
+ * mechanics because an update may leave head at a detached node.
+ * And while direct writes are possible for tail updates, they
+ * increase the risk of long retraversals, and hence long garbage
+ * chains, which can be much more costly than is worthwhile
+ * considering that the cost difference of performing a CAS vs
+ * write is smaller when they are not triggered on each operation
+ * (especially considering that writes and CASes equally require
+ * additional GC bookkeeping ("write barriers") that are sometimes
+ * more costly than the writes themselves because of contention).
+ *
+ * *** Overview of implementation ***
+ *
+ * We use a threshold-based approach to updates, with a slack
+ * threshold of two -- that is, we update head/tail when the
+ * current pointer appears to be two or more steps away from the
+ * first/last node. The slack value is hard-wired: a path greater
+ * than one is naturally implemented by checking equality of
+ * traversal pointers except when the list has only one element,
+ * in which case we keep slack threshold at one. Avoiding tracking
+ * explicit counts across method calls slightly simplifies an
+ * already-messy implementation. Using randomization would
+ * probably work better if there were a low-quality dirt-cheap
+ * per-thread one available, but even ThreadLocalRandom is too
+ * heavy for these purposes.
+ *
+ * With such a small slack threshold value, it is not worthwhile
+ * to augment this with path short-circuiting (i.e., unsplicing
+ * interior nodes) except in the case of cancellation/removal (see
+ * below).
+ *
+ * We allow both the head and tail fields to be null before any
+ * nodes are enqueued; initializing upon first append. This
+ * simplifies some other logic, as well as providing more
+ * efficient explicit control paths instead of letting JVMs insert
+ * implicit NullPointerExceptions when they are null. While not
+ * currently fully implemented, we also leave open the possibility
+ * of re-nulling these fields when empty (which is complicated to
+ * arrange, for little benefit.)
+ *
+ * All enqueue/dequeue operations are handled by the single method
+ * "xfer" with parameters indicating whether to act as some form
+ * of offer, put, poll, take, or transfer (each possibly with
+ * timeout). The relative complexity of using one monolithic
+ * method is outweighed by the code bulk and maintenance problems
+ * of using separate methods for each case.
*
- * The main extension is to provide different Wait modes for the
- * main "xfer" method that puts or takes items. These don't
- * impact the basic dual-queue logic, but instead control whether
- * or how threads block upon insertion of request or data nodes
- * into the dual queue. It also uses slightly different
- * conventions for tracking whether nodes are off-list or
- * cancelled.
+ * Operation consists of up to three phases. The first is
+ * implemented within method xfer, the second in tryAppend, and
+ * the third in method awaitMatch.
+ *
+ * 1. Try to match an existing node
+ *
+ * Starting at head, skip already-matched nodes until finding
+ * an unmatched node of opposite mode, if one exists, in which
+ * case matching it and returning, also if necessary updating
+ * head to one past the matched node (or the node itself if the
+ * list has no other unmatched nodes). If the CAS misses, then
+ * a loop retries advancing head by two steps until either
+ * success or the slack is at most two. By requiring that each
+ * attempt advances head by two (if applicable), we ensure that
+ * the slack does not grow without bound. Traversals also check
+ * if the initial head is now off-list, in which case they
+ * start at the new head.
+ *
+ * If no candidates are found and the call was untimed
+ * poll/offer, (argument "how" is NOW) return.
+ *
+ * 2. Try to append a new node (method tryAppend)
+ *
+ * Starting at current tail pointer, find the actual last node
+ * and try to append a new node (or if head was null, establish
+ * the first node). Nodes can be appended only if their
+ * predecessors are either already matched or are of the same
+ * mode. If we detect otherwise, then a new node with opposite
+ * mode must have been appended during traversal, so we must
+ * restart at phase 1. The traversal and update steps are
+ * otherwise similar to phase 1: Retrying upon CAS misses and
+ * checking for staleness. In particular, if a self-link is
+ * encountered, then we can safely jump to a node on the list
+ * by continuing the traversal at current head.
+ *
+ * On successful append, if the call was ASYNC, return.
+ *
+ * 3. Await match or cancellation (method awaitMatch)
+ *
+ * Wait for another thread to match node; instead cancelling if
+ * the current thread was interrupted or the wait timed out. On
+ * multiprocessors, we use front-of-queue spinning: If a node
+ * appears to be the first unmatched node in the queue, it
+ * spins a bit before blocking. In either case, before blocking
+ * it tries to unsplice any nodes between the current "head"
+ * and the first unmatched node.
+ *
+ * Front-of-queue spinning vastly improves performance of
+ * heavily contended queues. And so long as it is relatively
+ * brief and "quiet", spinning does not much impact performance
+ * of less-contended queues. During spins threads check their
+ * interrupt status and generate a thread-local random number
+ * to decide to occasionally perform a Thread.yield. While
+ * yield has underdefined specs, we assume that it might help,
+ * and will not hurt, in limiting impact of spinning on busy
+ * systems. We also use smaller (1/2) spins for nodes that are
+ * not known to be front but whose predecessors have not
+ * blocked -- these "chained" spins avoid artifacts of
+ * front-of-queue rules which otherwise lead to alternating
+ * nodes spinning vs blocking. Further, front threads that
+ * represent phase changes (from data to request node or vice
+ * versa) compared to their predecessors receive additional
+ * chained spins, reflecting longer paths typically required to
+ * unblock threads during phase changes.
+ *
+ *
+ * ** Unlinking removed interior nodes **
+ *
+ * In addition to minimizing garbage retention via self-linking
+ * described above, we also unlink removed interior nodes. These
+ * may arise due to timed out or interrupted waits, or calls to
+ * remove(x) or Iterator.remove. Normally, given a node that was
+ * at one time known to be the predecessor of some node s that is
+ * to be removed, we can unsplice s by CASing the next field of
+ * its predecessor if it still points to s (otherwise s must
+ * already have been removed or is now offlist). But there are two
+ * situations in which we cannot guarantee to make node s
+ * unreachable in this way: (1) If s is the trailing node of list
+ * (i.e., with null next), then it is pinned as the target node
+ * for appends, so can only be removed later after other nodes are
+ * appended. (2) We cannot necessarily unlink s given a
+ * predecessor node that is matched (including the case of being
+ * cancelled): the predecessor may already be unspliced, in which
+ * case some previous reachable node may still point to s.
+ * (For further explanation see Herlihy & Shavit, "The Art of
+ * Multiprocessor Programming", chapter 9.) However, in both
+ * cases, we can rule out the need for further action if either s
+ * or its predecessor is (or can be made to be) at, or falls off
+ * from, the head of the list.
+ *
+ * Without taking these into account, it would be possible for an
+ * unbounded number of supposedly removed nodes to remain
+ * reachable. Situations leading to such buildup are uncommon but
+ * can occur in practice; for example when a series of short timed
+ * calls to poll repeatedly time out but never otherwise fall off
+ * the list because of an untimed call to take at the front of the
+ * queue.
+ *
+ * When these cases arise, rather than always retraversing the
+ * entire list to find an actual predecessor to unlink (which
+ * won't help for case (1) anyway), we record a conservative
+ * estimate of possible unsplice failures (in "sweepVotes") and
+ * trigger a full sweep when that estimate exceeds a threshold
+ * ("SWEEP_THRESHOLD"): the maximum number of estimated removal
+ * failures to tolerate before sweeping through and unlinking
+ * cancelled nodes that were not unlinked upon initial removal.
+ * Sweeps are performed by the thread that hits the threshold
+ * (rather than by background threads or by spreading work to
+ * other threads)
+ * because in the main contexts in which removal occurs, the
+ * caller is already timed-out, cancelled, or performing a
+ * potentially O(n) operation (e.g. remove(x)), none of which are
+ * time-critical enough to warrant the overhead that alternatives
+ * would impose on other threads.
+ *
+ * Because the sweepVotes estimate is conservative, and because
+ * nodes become unlinked "naturally" as they fall off the head of
+ * the queue, and because we allow votes to accumulate even while
+ * sweeps are in progress, there are typically significantly fewer
+ * such nodes than estimated. The choice of threshold value
+ * balances the likelihood of wasted effort and contention against
+ * providing a worst-case bound on retention of interior nodes in
+ * quiescent queues. The value defined below was chosen
+ * empirically to balance these under various timeout scenarios.
+ *
+ * Note that we cannot self-link unlinked interior nodes during
+ * sweeps. However, the associated garbage chains terminate when
+ * some successor ultimately falls off the head of the list and is
+ * self-linked.
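+ *
+ * The vote-then-sweep step is a small CAS loop (see method
+ * unsplice below), roughly:
+ *
+ *   for (;;) {
+ *       int v = sweepVotes;
+ *       if (v < SWEEP_THRESHOLD) {
+ *           if (casSweepVotes(v, v + 1))
+ *               break;               // recorded one vote
+ *       }
+ *       else if (casSweepVotes(v, 0)) {
+ *           sweep();                 // unlink matched nodes
+ *           break;
+ *       }
+ *   }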
*/
- // Wait modes for xfer method
- static final int NOWAIT = 0;
- static final int TIMEOUT = 1;
- static final int WAIT = 2;
-
- /** The number of CPUs, for spin control */
- static final int NCPUS = Runtime.getRuntime().availableProcessors();
+ /** True if on multiprocessor */
+ private static final boolean MP =
+ Runtime.getRuntime().availableProcessors() > 1;
/**
- * The number of times to spin before blocking in timed waits.
- * The value is empirically derived -- it works well across a
- * variety of processors and OSes. Empirically, the best value
- * seems not to vary with number of CPUs (beyond 2) so is just
- * a constant.
+ * The number of times to spin (with randomly interspersed calls
+ * to Thread.yield) on multiprocessor before blocking when a node
+ * is apparently the first waiter in the queue. See above for
+ * explanation. Must be a power of two. The value is empirically
+ * derived -- it works pretty well across a variety of processors,
+ * numbers of CPUs, and OSes.
*/
- static final int maxTimedSpins = (NCPUS < 2)? 0 : 32;
+ private static final int FRONT_SPINS = 1 << 7;
/**
- * The number of times to spin before blocking in untimed waits.
- * This is greater than timed value because untimed waits spin
- * faster since they don't need to check times on each spin.
+ * The number of times to spin before blocking when a node is
+ * preceded by another node that is apparently spinning. Also
+ * serves as an increment to FRONT_SPINS on phase changes, and as
+ * base average frequency for yielding during spins. Must be a
+ * power of two.
*/
- static final int maxUntimedSpins = maxTimedSpins * 16;
+ private static final int CHAINED_SPINS = FRONT_SPINS >>> 1;
/**
- * The number of nanoseconds for which it is faster to spin
- * rather than to use timed park. A rough estimate suffices.
+ * The maximum number of estimated removal failures (sweepVotes)
+ * to tolerate before sweeping through the queue unlinking
+ * cancelled nodes that were not unlinked upon initial
+ * removal. See above for explanation. The value must be at least
+ * two to avoid useless sweeps when removing trailing nodes.
*/
- static final long spinForTimeoutThreshold = 1000L;
+ static final int SWEEP_THRESHOLD = 32;
/**
- * Node class for LinkedTransferQueue. Opportunistically
- * subclasses from AtomicReference to represent item. Uses Object,
- * not E, to allow setting item to "this" after use, to avoid
- * garbage retention. Similarly, setting the next field to this is
- * used as sentinel that node is off list.
+ * Queue nodes. Uses Object, not E, for items to allow forgetting
+ * them after use. Relies heavily on Unsafe mechanics to minimize
+ * unnecessary ordering constraints: Writes that are intrinsically
+ * ordered wrt other accesses or CASes use simple relaxed forms.
*/
- static final class QNode extends AtomicReference<Object> {
- volatile QNode next;
- volatile Thread waiter; // to control park/unpark
- final boolean isData;
- QNode(Object item, boolean isData) {
- super(item);
+ static final class Node {
+ final boolean isData; // false if this is a request node
+ volatile Object item; // initially non-null if isData; CASed to match
+ volatile Node next;
+ volatile Thread waiter; // null until waiting
+
+ // CAS methods for fields
+ final boolean casNext(Node cmp, Node val) {
+ return UNSAFE.compareAndSwapObject(this, nextOffset, cmp, val);
+ }
+
+ final boolean casItem(Object cmp, Object val) {
+ // assert cmp == null || cmp.getClass() != Node.class;
+ return UNSAFE.compareAndSwapObject(this, itemOffset, cmp, val);
+ }
+
+ /**
+ * Constructs a new node. Uses relaxed write because item can
+ * only be seen after publication via casNext.
+ */
+ Node(Object item, boolean isData) {
+ UNSAFE.putObject(this, itemOffset, item); // relaxed write
this.isData = isData;
}
- static final AtomicReferenceFieldUpdater<QNode, QNode>
- nextUpdater = AtomicReferenceFieldUpdater.newUpdater
- (QNode.class, QNode.class, "next");
+ /**
+ * Links node to itself to avoid garbage retention. Called
+ * only after CASing head field, so uses relaxed write.
+ */
+ final void forgetNext() {
+ UNSAFE.putObject(this, nextOffset, this);
+ }
- final boolean casNext(QNode cmp, QNode val) {
- return nextUpdater.compareAndSet(this, cmp, val);
+ /**
+ * Sets item to self and waiter to null, to avoid garbage
+ * retention after matching or cancelling. Uses relaxed writes
+ * because order is already constrained in the only calling
+ * contexts: item is forgotten only after volatile/atomic
+ * mechanics that extract items. Similarly, clearing waiter
+ * follows either CAS or return from park (if ever parked;
+ * else we don't care).
+ */
+ final void forgetContents() {
+ UNSAFE.putObject(this, itemOffset, this);
+ UNSAFE.putObject(this, waiterOffset, null);
}
- final void clearNext() {
- nextUpdater.lazySet(this, this);
+ /**
+ * Returns true if this node has been matched, including the
+ * case of artificial matches due to cancellation.
+ */
+ final boolean isMatched() {
+ Object x = item;
+ return (x == this) || ((x == null) == isData);
}
- }
+ /**
+ * Returns true if this is an unmatched request node.
+ */
+ final boolean isUnmatchedRequest() {
+ return !isData && item == null;
+ }
- /**
- * Padded version of AtomicReference used for head, tail and
- * cleanMe, to alleviate contention across threads CASing one vs
- * the other.
- */
- static final class PaddedAtomicReference<T> extends AtomicReference<T> {
- // enough padding for 64bytes with 4byte refs
- Object p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, pa, pb, pc, pd, pe;
- PaddedAtomicReference(T r) { super(r); }
+ /**
+ * Returns true if a node with the given mode cannot be
+ * appended to this node because this node is unmatched and
+ * has opposite data mode.
+ */
+ final boolean cannotPrecede(boolean haveData) {
+ boolean d = isData;
+ Object x;
+ return d != haveData && (x = item) != this && (x != null) == d;
+ }
+
+ /**
+ * Tries to artificially match a data node -- used by remove.
+ */
+ final boolean tryMatchData() {
+ // assert isData;
+ Object x = item;
+ if (x != null && x != this && casItem(x, null)) {
+ LockSupport.unpark(waiter);
+ return true;
+ }
+ return false;
+ }
+
+ private static final long serialVersionUID = -3375979862319811754L;
+
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe UNSAFE;
+ private static final long itemOffset;
+ private static final long nextOffset;
+ private static final long waiterOffset;
+ static {
+ try {
+ UNSAFE = getUnsafe();
+ Class<?> k = Node.class;
+ itemOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("item"));
+ nextOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("next"));
+ waiterOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("waiter"));
+ } catch (Exception e) {
+ throw new Error(e);
+ }
+ }
}
+ /** head of the queue; null until first enqueue */
+ transient volatile Node head;
- /** head of the queue */
- private transient final PaddedAtomicReference<QNode> head;
- /** tail of the queue */
- private transient final PaddedAtomicReference<QNode> tail;
+ /** tail of the queue; null until first append */
+ private transient volatile Node tail;
- /**
- * Reference to a cancelled node that might not yet have been
- * unlinked from queue because it was the last inserted node
- * when it cancelled.
- */
- private transient final PaddedAtomicReference<QNode> cleanMe;
+ /** The number of apparent failures to unsplice removed nodes */
+ private transient volatile int sweepVotes;
- /**
- * Tries to cas nh as new head; if successful, unlink
- * old head's next node to avoid garbage retention.
+ // CAS methods for fields
+ private boolean casTail(Node cmp, Node val) {
+ return UNSAFE.compareAndSwapObject(this, tailOffset, cmp, val);
+ }
+
+ private boolean casHead(Node cmp, Node val) {
+ return UNSAFE.compareAndSwapObject(this, headOffset, cmp, val);
+ }
+
+ private boolean casSweepVotes(int cmp, int val) {
+ return UNSAFE.compareAndSwapInt(this, sweepVotesOffset, cmp, val);
+ }
+
+ /*
+ * Possible values for "how" argument in xfer method.
*/
- private boolean advanceHead(QNode h, QNode nh) {
- if (h == head.get() && head.compareAndSet(h, nh)) {
- h.clearNext(); // forget old next
- return true;
- }
- return false;
+ private static final int NOW = 0; // for untimed poll, tryTransfer
+ private static final int ASYNC = 1; // for offer, put, add
+ private static final int SYNC = 2; // for transfer, take
+ private static final int TIMED = 3; // for timed poll, tryTransfer
+
+ @SuppressWarnings("unchecked")
+ static <E> E cast(Object item) {
+ // assert item == null || item.getClass() != Node.class;
+ return (E) item;
}
/**
- * Puts or takes an item. Used for most queue operations (except
- * poll() and tryTransfer()). See the similar code in
- * SynchronousQueue for detailed explanation.
+ * Implements all queuing methods. See above for explanation.
*
- * @param e the item or if null, signifies that this is a take
- * @param mode the wait mode: NOWAIT, TIMEOUT, WAIT
- * @param nanos timeout in nanosecs, used only if mode is TIMEOUT
- * @return an item, or null on failure
+ * @param e the item or null for take
+ * @param haveData true if this is a put, else a take
+ * @param how NOW, ASYNC, SYNC, or TIMED
+ * @param nanos timeout in nanosecs, used only if mode is TIMED
+ * @return an item if matched, else e
+ * @throws NullPointerException if haveData mode but e is null
*/
- private Object xfer(Object e, int mode, long nanos) {
- boolean isData = (e != null);
- QNode s = null;
- final PaddedAtomicReference<QNode> head = this.head;
- final PaddedAtomicReference<QNode> tail = this.tail;
-
- for (;;) {
- QNode t = tail.get();
- QNode h = head.get();
-
- if (t != null && (t == h || t.isData == isData)) {
- if (s == null)
- s = new QNode(e, isData);
- QNode last = t.next;
- if (last != null) {
- if (t == tail.get())
- tail.compareAndSet(t, last);
- }
- else if (t.casNext(null, s)) {
- tail.compareAndSet(t, s);
- return awaitFulfill(t, s, e, mode, nanos);
+ private E xfer(E e, boolean haveData, int how, long nanos) {
+ if (haveData && (e == null))
+ throw new NullPointerException();
+ Node s = null; // the node to append, if needed
+
+ retry:
+ for (;;) { // restart on append race
+
+ for (Node h = head, p = h; p != null;) { // find & match first node
+ boolean isData = p.isData;
+ Object item = p.item;
+ if (item != p && (item != null) == isData) { // unmatched
+ if (isData == haveData) // can't match
+ break;
+ if (p.casItem(item, e)) { // match
+ for (Node q = p; q != h;) {
+ Node n = q.next; // update by 2 unless singleton
+ if (head == h && casHead(h, n == null ? q : n)) {
+ h.forgetNext();
+ break;
+ } // advance and retry
+ if ((h = head) == null ||
+ (q = h.next) == null || !q.isMatched())
+ break; // unless slack < 2
+ }
+ LockSupport.unpark(p.waiter);
+ return LinkedTransferQueue.<E>cast(item);
+ }
}
+ Node n = p.next;
+ p = (p != n) ? n : (h = head); // Use head if p offlist
}
- else if (h != null) {
- QNode first = h.next;
- if (t == tail.get() && first != null &&
- advanceHead(h, first)) {
- Object x = first.get();
- if (x != first && first.compareAndSet(x, e)) {
- LockSupport.unpark(first.waiter);
- return isData? e : x;
- }
- }
+ if (how != NOW) { // No matches available
+ if (s == null)
+ s = new Node(e, haveData);
+ Node pred = tryAppend(s, haveData);
+ if (pred == null)
+ continue retry; // lost race vs opposite mode
+ if (how != ASYNC)
+ return awaitMatch(s, pred, e, (how == TIMED), nanos);
}
+ return e; // not waiting
}
}
-
/**
- * Version of xfer for poll() and tryTransfer, which
- * simplifies control paths both here and in xfer.
+ * Tries to append node s as tail.
+ *
+ * @param s the node to append
+ * @param haveData true if appending in data mode
+ * @return null on failure due to losing race with append in
+ * different mode, else s's predecessor, or s itself if no
+ * predecessor
*/
- private Object fulfill(Object e) {
- boolean isData = (e != null);
- final PaddedAtomicReference<QNode> head = this.head;
- final PaddedAtomicReference<QNode> tail = this.tail;
-
- for (;;) {
- QNode t = tail.get();
- QNode h = head.get();
-
- if (t != null && (t == h || t.isData == isData)) {
- QNode last = t.next;
- if (t == tail.get()) {
- if (last != null)
- tail.compareAndSet(t, last);
- else
- return null;
- }
+ private Node tryAppend(Node s, boolean haveData) {
+ for (Node t = tail, p = t;;) { // move p to last node and append
+ Node n, u; // temps for reads of next & tail
+ if (p == null && (p = head) == null) {
+ if (casHead(null, s))
+ return s; // initialize
}
- else if (h != null) {
- QNode first = h.next;
- if (t == tail.get() &&
- first != null &&
- advanceHead(h, first)) {
- Object x = first.get();
- if (x != first && first.compareAndSet(x, e)) {
- LockSupport.unpark(first.waiter);
- return isData? e : x;
- }
+ else if (p.cannotPrecede(haveData))
+ return null; // lost race vs opposite mode
+ else if ((n = p.next) != null) // not last; keep traversing
+ p = p != t && t != (u = tail) ? (t = u) : // stale tail
+ (p != n) ? n : null; // restart if off list
+ else if (!p.casNext(null, s))
+ p = p.next; // re-read on CAS failure
+ else {
+ if (p != t) { // update if slack now >= 2
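+ // On a CAS miss, re-read tail and retry with a candidate
+ // two links past it; give up once fewer than two successors
+ // exist or the chain is self-linked (off-list).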
+ while ((tail != t || !casTail(t, s)) &&
+ (t = tail) != null &&
+ (s = t.next) != null && // advance and retry
+ (s = s.next) != null && s != t);
}
+ return p;
}
}
}
/**
- * Spins/blocks until node s is fulfilled or caller gives up,
- * depending on wait mode.
+ * Spins/yields/blocks until node s is matched or caller gives up.
*
- * @param pred the predecessor of waiting node
* @param s the waiting node
+ * @param pred the predecessor of s, or s itself if it has no
+ * predecessor, or null if unknown (the null case does not occur
+ * in any current calls but may in possible future extensions)
* @param e the comparison value for checking match
- * @param mode mode
- * @param nanos timeout value
- * @return matched item, or s if cancelled
+ * @param timed if true, wait only until timeout elapses
+ * @param nanos timeout in nanosecs, used only if timed is true
+ * @return matched item, or e if unmatched on interrupt or timeout
*/
- private Object awaitFulfill(QNode pred, QNode s, Object e,
- int mode, long nanos) {
- if (mode == NOWAIT)
- return null;
-
- long lastTime = (mode == TIMEOUT)? System.nanoTime() : 0;
+ private E awaitMatch(Node s, Node pred, E e, boolean timed, long nanos) {
+ long lastTime = timed ? System.nanoTime() : 0L;
Thread w = Thread.currentThread();
- int spins = -1; // set to desired spin count below
+ int spins = -1; // initialized after first item and cancel checks
+ ThreadLocalRandom randomYields = null; // bound if needed
+
for (;;) {
- if (w.isInterrupted())
- s.compareAndSet(e, s);
- Object x = s.get();
- if (x != e) { // Node was matched or cancelled
- advanceHead(pred, s); // unlink if head
- if (x == s) { // was cancelled
- clean(pred, s);
- return null;
- }
- else if (x != null) {
- s.set(s); // avoid garbage retention
- return x;
- }
- else
- return e;
+ Object item = s.item;
+ if (item != e) { // matched
+ // assert item != s;
+ s.forgetContents(); // avoid garbage
+ return LinkedTransferQueue.<E>cast(item);
}
- if (mode == TIMEOUT) {
- long now = System.nanoTime();
- nanos -= now - lastTime;
- lastTime = now;
- if (nanos <= 0) {
- s.compareAndSet(e, s); // try to cancel
- continue;
- }
+ if ((w.isInterrupted() || (timed && nanos <= 0)) &&
+ s.casItem(e, s)) { // cancel
+ unsplice(pred, s);
+ return e;
}
- if (spins < 0) {
- QNode h = head.get(); // only spin if at head
- spins = ((h != null && h.next == s) ?
- (mode == TIMEOUT?
- maxTimedSpins : maxUntimedSpins) : 0);
+
+ if (spins < 0) { // establish spins at/near front
+ if ((spins = spinsFor(pred, s.isData)) > 0)
+ randomYields = ThreadLocalRandom.current();
}
- if (spins > 0)
+ else if (spins > 0) { // spin
--spins;
- else if (s.waiter == null)
- s.waiter = w;
- else if (mode != TIMEOUT) {
- LockSupport.park(this);
- s.waiter = null;
- spins = -1;
+ if (randomYields.nextInt(CHAINED_SPINS) == 0)
+ Thread.yield(); // occasionally yield
+ }
+ else if (s.waiter == null) {
+ s.waiter = w; // request unpark then recheck
}
- else if (nanos > spinForTimeoutThreshold) {
- LockSupport.parkNanos(this, nanos);
- s.waiter = null;
- spins = -1;
+ else if (timed) {
+ long now = System.nanoTime();
+ if ((nanos -= now - lastTime) > 0)
+ LockSupport.parkNanos(this, nanos);
+ lastTime = now;
+ }
+ else {
+ LockSupport.park(this);
}
}
}
/**
- * Returns validated tail for use in cleaning methods.
+ * Returns spin/yield value for a node with given predecessor and
+ * data mode. See above for explanation.
*/
- private QNode getValidatedTail() {
- for (;;) {
- QNode h = head.get();
- QNode first = h.next;
- if (first != null && first.next == first) { // help advance
- advanceHead(h, first);
- continue;
- }
- QNode t = tail.get();
- QNode last = t.next;
- if (t == tail.get()) {
- if (last != null)
- tail.compareAndSet(t, last); // help advance
- else
- return t;
+ private static int spinsFor(Node pred, boolean haveData) {
+ if (MP && pred != null) {
+ if (pred.isData != haveData) // phase change
+ return FRONT_SPINS + CHAINED_SPINS;
+ if (pred.isMatched()) // probably at front
+ return FRONT_SPINS;
+ if (pred.waiter == null) // pred apparently spinning
+ return CHAINED_SPINS;
+ }
+ return 0;
+ }
+
+ /* -------------- Traversal methods -------------- */
+
+ /**
+ * Returns the successor of p, or the head node if p.next has been
+ * linked to self, which will only be true if traversing with a
+ * stale pointer that is now off the list.
+ */
+ final Node succ(Node p) {
+ Node next = p.next;
+ return (p == next) ? head : next;
+ }
+
+ /**
+ * Returns the first unmatched node of the given mode, or null if
+ * none. Used by methods isEmpty, hasWaitingConsumer.
+ */
+ private Node firstOfMode(boolean isData) {
+ for (Node p = head; p != null; p = succ(p)) {
+ if (!p.isMatched())
+ return (p.isData == isData) ? p : null;
+ }
+ return null;
+ }
+
+ /**
+ * Returns the item in the first unmatched node with isData; or
+ * null if none. Used by peek.
+ */
+ private E firstDataItem() {
+ for (Node p = head; p != null; p = succ(p)) {
+ Object item = p.item;
+ if (p.isData) {
+ if (item != null && item != p)
+ return LinkedTransferQueue.<E>cast(item);
}
+ else if (item == null)
+ return null;
}
+ return null;
}
/**
- * Gets rid of cancelled node s with original predecessor pred.
- *
- * @param pred predecessor of cancelled node
- * @param s the cancelled node
+ * Traverses and counts unmatched nodes of the given mode.
+ * Used by methods size and getWaitingConsumerCount.
*/
- private void clean(QNode pred, QNode s) {
- Thread w = s.waiter;
- if (w != null) { // Wake up thread
- s.waiter = null;
- if (w != Thread.currentThread())
- LockSupport.unpark(w);
+ private int countOfMode(boolean data) {
+ int count = 0;
+ for (Node p = head; p != null; ) {
+ if (!p.isMatched()) {
+ if (p.isData != data)
+ return 0;
+ if (++count == Integer.MAX_VALUE) // saturated
+ break;
+ }
+ Node n = p.next;
+ if (n != p)
+ p = n;
+ else {
+ count = 0;
+ p = head;
+ }
}
+ return count;
+ }
- if (pred == null)
- return;
+ final class Itr implements Iterator<E> {
+ private Node nextNode; // next node to return item for
+ private E nextItem; // the corresponding item
+ private Node lastRet; // last returned node, to support remove
+ private Node lastPred; // predecessor to unlink lastRet
- /*
- * At any given time, exactly one node on list cannot be
- * deleted -- the last inserted node. To accommodate this, if
- * we cannot delete s, we save its predecessor as "cleanMe",
- * processing the previously saved version first. At least one
- * of node s or the node previously saved can always be
- * processed, so this always terminates.
+ /**
+ * Moves to next node after prev, or first node if prev null.
*/
- while (pred.next == s) {
- QNode oldpred = reclean(); // First, help get rid of cleanMe
- QNode t = getValidatedTail();
- if (s != t) { // If not tail, try to unsplice
- QNode sn = s.next; // s.next == s means s already off list
- if (sn == s || pred.casNext(s, sn))
+ private void advance(Node prev) {
+ /*
+ * To track and avoid buildup of deleted nodes in the face
+ * of calls to both Queue.remove and Itr.remove, we must
+ * include variants of unsplice and sweep upon each
+ * advance: Upon Itr.remove, we may need to catch up links
+ * from lastPred, and upon other removes, we might need to
+ * skip ahead from stale nodes and unsplice deleted ones
+ * found while advancing.
+ */
+
+ Node r, b; // reset lastPred upon possible deletion of lastRet
+ if ((r = lastRet) != null && !r.isMatched())
+ lastPred = r; // next lastPred is old lastRet
+ else if ((b = lastPred) == null || b.isMatched())
+ lastPred = null; // at start of list
+ else {
+ Node s, n; // help with removal of lastPred.next
+ while ((s = b.next) != null &&
+ s != b && s.isMatched() &&
+ (n = s.next) != null && n != s)
+ b.casNext(s, n);
+ }
+
+ this.lastRet = prev;
+
+ for (Node p = prev, s, n;;) {
+ s = (p == null) ? head : p.next;
+ if (s == null)
+ break;
+ else if (s == p) {
+ p = null;
+ continue;
+ }
+ Object item = s.item;
+ if (s.isData) {
+ if (item != null && item != s) {
+ nextItem = LinkedTransferQueue.<E>cast(item);
+ nextNode = s;
+ return;
+ }
+ }
+ else if (item == null)
+ break;
+ // assert s.isMatched();
+ if (p == null)
+ p = s;
+ else if ((n = s.next) == null)
break;
+ else if (s == n)
+ p = null;
+ else
+ p.casNext(s, n);
}
- else if (oldpred == pred || // Already saved
- (oldpred == null && cleanMe.compareAndSet(null, pred)))
- break; // Postpone cleaning
+ nextNode = null;
+ nextItem = null;
+ }
+
+ Itr() {
+ advance(null);
+ }
+
+ public final boolean hasNext() {
+ return nextNode != null;
+ }
+
+ public final E next() {
+ Node p = nextNode;
+ if (p == null) throw new NoSuchElementException();
+ E e = nextItem;
+ advance(p);
+ return e;
+ }
+
+ public final void remove() {
+ final Node lastRet = this.lastRet;
+ if (lastRet == null)
+ throw new IllegalStateException();
+ this.lastRet = null;
+ if (lastRet.tryMatchData())
+ unsplice(lastPred, lastRet);
}
}
+ /* -------------- Removal methods -------------- */
+
/**
- * Tries to unsplice the cancelled node held in cleanMe that was
- * previously uncleanable because it was at tail.
+ * Unsplices (now or later) the given deleted/cancelled node with
+ * the given predecessor.
*
- * @return current cleanMe node (or null)
+ * @param pred a node that was at one time known to be the
+ * predecessor of s, or null or s itself if s is/was at head
+ * @param s the node to be unspliced
*/
- private QNode reclean() {
+ final void unsplice(Node pred, Node s) {
+ s.forgetContents(); // forget unneeded fields
/*
- * cleanMe is, or at one time was, predecessor of cancelled
- * node s that was the tail so could not be unspliced. If s
- * is no longer the tail, try to unsplice if necessary and
- * make cleanMe slot available. This differs from similar
- * code in clean() because we must check that pred still
- * points to a cancelled node that must be unspliced -- if
- * not, we can (must) clear cleanMe without unsplicing.
- * This can loop only due to contention on casNext or
- * clearing cleanMe.
+ * See above for rationale. Briefly: if pred still points to
+ * s, try to unlink s. If s cannot be unlinked (because it is
+ * the trailing node or pred might be unlinked) and neither pred
+ * nor s is head or offlist, add to sweepVotes, and if enough
+ * votes have accumulated, sweep.
*/
- QNode pred;
- while ((pred = cleanMe.get()) != null) {
- QNode t = getValidatedTail();
- QNode s = pred.next;
- if (s != t) {
- QNode sn;
- if (s == null || s == pred || s.get() != s ||
- (sn = s.next) == s || pred.casNext(s, sn))
- cleanMe.compareAndSet(pred, null);
+ if (pred != null && pred != s && pred.next == s) {
+ Node n = s.next;
+ if (n == null ||
+ (n != s && pred.casNext(s, n) && pred.isMatched())) {
+ for (;;) { // check if at, or could be, head
+ Node h = head;
+ if (h == pred || h == s || h == null)
+ return; // at head or list empty
+ if (!h.isMatched())
+ break;
+ Node hn = h.next;
+ if (hn == null)
+ return; // now empty
+ if (hn != h && casHead(h, hn))
+ h.forgetNext(); // advance head
+ }
+ if (pred.next != pred && s.next != s) { // recheck if offlist
+ for (;;) { // sweep now if enough votes
+ int v = sweepVotes;
+ if (v < SWEEP_THRESHOLD) {
+ if (casSweepVotes(v, v + 1))
+ break;
+ }
+ else if (casSweepVotes(v, 0)) {
+ sweep();
+ break;
+ }
+ }
+ }
}
- else // s is still tail; cannot clean
+ }
+ }
+
+ /**
+ * Unlinks matched (typically cancelled) nodes encountered in a
+ * traversal from head.
+ */
+ private void sweep() {
+ for (Node p = head, s, n; p != null && (s = p.next) != null; ) {
+ if (!s.isMatched())
+ // Unmatched nodes are never self-linked
+ p = s;
+ else if ((n = s.next) == null) // trailing node is pinned
break;
+ else if (s == n) // stale
+ // No need to also check for p == s, since that implies s == n
+ p = head;
+ else
+ p.casNext(s, n);
}
- return pred;
}
/**
+ * Main implementation of remove(Object)
+ */
+ private boolean findAndRemove(Object e) {
+ if (e != null) {
+ for (Node pred = null, p = head; p != null; ) {
+ Object item = p.item;
+ if (p.isData) {
+ if (item != null && item != p && e.equals(item) &&
+ p.tryMatchData()) {
+ unsplice(pred, p);
+ return true;
+ }
+ }
+ else if (item == null)
+ break;
+ pred = p;
+ if ((p = p.next) == pred) { // stale
+ pred = null;
+ p = head;
+ }
+ }
+ }
+ return false;
+ }
+
+ /**
* Creates an initially empty {@code LinkedTransferQueue}.
*/
public LinkedTransferQueue() {
- QNode dummy = new QNode(null, false);
- head = new PaddedAtomicReference<QNode>(dummy);
- tail = new PaddedAtomicReference<QNode>(dummy);
- cleanMe = new PaddedAtomicReference<QNode>(null);
}
/**
@@ -435,252 +1000,200 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
addAll(c);
}
- public void put(E e) throws InterruptedException {
- if (e == null) throw new NullPointerException();
- if (Thread.interrupted()) throw new InterruptedException();
- xfer(e, NOWAIT, 0);
+ /**
+ * Inserts the specified element at the tail of this queue.
+ * As the queue is unbounded, this method will never block.
+ *
+ * @throws NullPointerException if the specified element is null
+ */
+ public void put(E e) {
+ xfer(e, true, ASYNC, 0);
}
- public boolean offer(E e, long timeout, TimeUnit unit)
- throws InterruptedException {
- if (e == null) throw new NullPointerException();
- if (Thread.interrupted()) throw new InterruptedException();
- xfer(e, NOWAIT, 0);
+ /**
+ * Inserts the specified element at the tail of this queue.
+ * As the queue is unbounded, this method will never block or
+ * return {@code false}.
+ *
+ * @return {@code true} (as specified by
+ * {@link java.util.concurrent.BlockingQueue#offer(Object,long,TimeUnit)
+ * BlockingQueue.offer})
+ * @throws NullPointerException if the specified element is null
+ */
+ public boolean offer(E e, long timeout, TimeUnit unit) {
+ xfer(e, true, ASYNC, 0);
return true;
}
+ /**
+ * Inserts the specified element at the tail of this queue.
+ * As the queue is unbounded, this method will never return {@code false}.
+ *
+ * @return {@code true} (as specified by {@link Queue#offer})
+ * @throws NullPointerException if the specified element is null
+ */
public boolean offer(E e) {
- if (e == null) throw new NullPointerException();
- xfer(e, NOWAIT, 0);
+ xfer(e, true, ASYNC, 0);
return true;
}
+ /**
+ * Inserts the specified element at the tail of this queue.
+ * As the queue is unbounded, this method will never throw
+ * {@link IllegalStateException} or return {@code false}.
+ *
+ * @return {@code true} (as specified by {@link Collection#add})
+ * @throws NullPointerException if the specified element is null
+ */
public boolean add(E e) {
- if (e == null) throw new NullPointerException();
- xfer(e, NOWAIT, 0);
+ xfer(e, true, ASYNC, 0);
return true;
}
+ /**
+ * Transfers the element to a waiting consumer immediately, if possible.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * otherwise returning {@code false} without enqueuing the element.
+ *
+ * @throws NullPointerException if the specified element is null
+ */
+ public boolean tryTransfer(E e) {
+ return xfer(e, true, NOW, 0) == null;
+ }
+
+ /**
+ * Transfers the element to a consumer, waiting if necessary to do so.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * else inserts the specified element at the tail of this queue
+ * and waits until the element is received by a consumer.
+ *
+ * @throws NullPointerException if the specified element is null
+ */
public void transfer(E e) throws InterruptedException {
- if (e == null) throw new NullPointerException();
- if (xfer(e, WAIT, 0) == null) {
- Thread.interrupted();
+ if (xfer(e, true, SYNC, 0) != null) {
+ Thread.interrupted(); // failure possible only due to interrupt
throw new InterruptedException();
}
}
+ /**
+ * Transfers the element to a consumer if it is possible to do so
+ * before the timeout elapses.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * else inserts the specified element at the tail of this queue
+ * and waits until the element is received by a consumer,
+ * returning {@code false} if the specified wait time elapses
+ * before the element can be transferred.
+ *
+ * @throws NullPointerException if the specified element is null
+ */
public boolean tryTransfer(E e, long timeout, TimeUnit unit)
throws InterruptedException {
- if (e == null) throw new NullPointerException();
- if (xfer(e, TIMEOUT, unit.toNanos(timeout)) != null)
+ if (xfer(e, true, TIMED, unit.toNanos(timeout)) == null)
return true;
if (!Thread.interrupted())
return false;
throw new InterruptedException();
}
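+
+ /*
+ * A hedged usage sketch (illustrative only; the queue and element
+ * names are made up): a producer that prefers an immediate
+ * hand-off, then a brief timed wait, and finally falls back to an
+ * asynchronous enqueue:
+ *
+ *   LinkedTransferQueue<String> q = new LinkedTransferQueue<String>();
+ *   if (!q.tryTransfer("task") &&
+ *       !q.tryTransfer("task", 50, TimeUnit.MILLISECONDS))
+ *       q.put("task"); // no consumer arrived; enqueue and move on
+ */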
- public boolean tryTransfer(E e) {
- if (e == null) throw new NullPointerException();
- return fulfill(e) != null;
- }
-
public E take() throws InterruptedException {
- Object e = xfer(null, WAIT, 0);
+ E e = xfer(null, false, SYNC, 0);
if (e != null)
- return (E)e;
+ return e;
Thread.interrupted();
throw new InterruptedException();
}
public E poll(long timeout, TimeUnit unit) throws InterruptedException {
- Object e = xfer(null, TIMEOUT, unit.toNanos(timeout));
+ E e = xfer(null, false, TIMED, unit.toNanos(timeout));
if (e != null || !Thread.interrupted())
- return (E)e;
+ return e;
throw new InterruptedException();
}
public E poll() {
- return (E)fulfill(null);
+ return xfer(null, false, NOW, 0);
}
+ /**
+ * @throws NullPointerException {@inheritDoc}
+ * @throws IllegalArgumentException {@inheritDoc}
+ */
public int drainTo(Collection<? super E> c) {
if (c == null)
throw new NullPointerException();
if (c == this)
throw new IllegalArgumentException();
int n = 0;
- E e;
- while ( (e = poll()) != null) {
+ for (E e; (e = poll()) != null;) {
c.add(e);
++n;
}
return n;
}
+ /**
+ * @throws NullPointerException {@inheritDoc}
+ * @throws IllegalArgumentException {@inheritDoc}
+ */
public int drainTo(Collection<? super E> c, int maxElements) {
if (c == null)
throw new NullPointerException();
if (c == this)
throw new IllegalArgumentException();
int n = 0;
- E e;
- while (n < maxElements && (e = poll()) != null) {
+ for (E e; n < maxElements && (e = poll()) != null;) {
c.add(e);
++n;
}
return n;
}
- // Traversal-based methods
-
/**
- * Returns head after performing any outstanding helping steps.
+ * Returns an iterator over the elements in this queue in proper sequence.
+ * The elements will be returned in order from first (head) to last (tail).
+ *
+ * <p>The returned iterator is a "weakly consistent" iterator that
+ * will never throw {@link java.util.ConcurrentModificationException
+ * ConcurrentModificationException}, and guarantees to traverse
+ * elements as they existed upon construction of the iterator, and
+ * may (but is not guaranteed to) reflect any modifications
+ * subsequent to construction.
+ *
+ * @return an iterator over the elements in this queue in proper sequence
*/
- private QNode traversalHead() {
- for (;;) {
- QNode t = tail.get();
- QNode h = head.get();
- if (h != null && t != null) {
- QNode last = t.next;
- QNode first = h.next;
- if (t == tail.get()) {
- if (last != null)
- tail.compareAndSet(t, last);
- else if (first != null) {
- Object x = first.get();
- if (x == first)
- advanceHead(h, first);
- else
- return h;
- }
- else
- return h;
- }
- }
- reclean();
- }
- }
-
-
public Iterator<E> iterator() {
return new Itr();
}
- /**
- * Iterators. Basic strategy is to traverse list, treating
- * non-data (i.e., request) nodes as terminating list.
- * Once a valid data node is found, the item is cached
- * so that the next call to next() will return it even
- * if subsequently removed.
- */
- class Itr implements Iterator<E> {
- QNode next; // node to return next
- QNode pnext; // predecessor of next
- QNode snext; // successor of next
- QNode curr; // last returned node, for remove()
- QNode pcurr; // predecessor of curr, for remove()
- E nextItem; // Cache of next item, once commited to in next
-
- Itr() {
- findNext();
- }
-
- /**
- * Ensures next points to next valid node, or null if none.
- */
- void findNext() {
- for (;;) {
- QNode pred = pnext;
- QNode q = next;
- if (pred == null || pred == q) {
- pred = traversalHead();
- q = pred.next;
- }
- if (q == null || !q.isData) {
- next = null;
- return;
- }
- Object x = q.get();
- QNode s = q.next;
- if (x != null && q != x && q != s) {
- nextItem = (E)x;
- snext = s;
- pnext = pred;
- next = q;
- return;
- }
- pnext = q;
- next = s;
- }
- }
-
- public boolean hasNext() {
- return next != null;
- }
-
- public E next() {
- if (next == null) throw new NoSuchElementException();
- pcurr = pnext;
- curr = next;
- pnext = next;
- next = snext;
- E x = nextItem;
- findNext();
- return x;
- }
-
- public void remove() {
- QNode p = curr;
- if (p == null)
- throw new IllegalStateException();
- Object x = p.get();
- if (x != null && x != p && p.compareAndSet(x, p))
- clean(pcurr, p);
- }
- }
-
public E peek() {
- for (;;) {
- QNode h = traversalHead();
- QNode p = h.next;
- if (p == null)
- return null;
- Object x = p.get();
- if (p != x) {
- if (!p.isData)
- return null;
- if (x != null)
- return (E)x;
- }
- }
+ return firstDataItem();
}
+ /**
+ * Returns {@code true} if this queue contains no elements.
+ *
+ * @return {@code true} if this queue contains no elements
+ */
public boolean isEmpty() {
- for (;;) {
- QNode h = traversalHead();
- QNode p = h.next;
- if (p == null)
- return true;
- Object x = p.get();
- if (p != x) {
- if (!p.isData)
- return true;
- if (x != null)
- return false;
- }
+ for (Node p = head; p != null; p = succ(p)) {
+ if (!p.isMatched())
+ return !p.isData;
}
+ return true;
}
public boolean hasWaitingConsumer() {
- for (;;) {
- QNode h = traversalHead();
- QNode p = h.next;
- if (p == null)
- return false;
- Object x = p.get();
- if (p != x)
- return !p.isData;
- }
+ return firstOfMode(false) != null;
}
/**
@@ -696,58 +1209,64 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
* @return the number of elements in this queue
*/
public int size() {
- int count = 0;
- QNode h = traversalHead();
- for (QNode p = h.next; p != null && p.isData; p = p.next) {
- Object x = p.get();
- if (x != null && x != p) {
- if (++count == Integer.MAX_VALUE) // saturated
- break;
- }
- }
- return count;
+ return countOfMode(true);
}
public int getWaitingConsumerCount() {
- int count = 0;
- QNode h = traversalHead();
- for (QNode p = h.next; p != null && !p.isData; p = p.next) {
- if (p.get() == null) {
- if (++count == Integer.MAX_VALUE)
- break;
- }
- }
- return count;
+ return countOfMode(false);
}
- public int remainingCapacity() {
- return Integer.MAX_VALUE;
+ /**
+ * Removes a single instance of the specified element from this queue,
+ * if it is present. More formally, removes an element {@code e} such
+ * that {@code o.equals(e)}, if this queue contains one or more such
+ * elements.
+ * Returns {@code true} if this queue contained the specified element
+ * (or equivalently, if this queue changed as a result of the call).
+ *
+ * @param o element to be removed from this queue, if present
+ * @return {@code true} if this queue changed as a result of the call
+ */
+ public boolean remove(Object o) {
+ return findAndRemove(o);
}
- public boolean remove(Object o) {
- if (o == null)
- return false;
- for (;;) {
- QNode pred = traversalHead();
- for (;;) {
- QNode q = pred.next;
- if (q == null || !q.isData)
- return false;
- if (q == pred) // restart
- break;
- Object x = q.get();
- if (x != null && x != q && o.equals(x) &&
- q.compareAndSet(x, q)) {
- clean(pred, q);
+ /**
+ * Returns {@code true} if this queue contains the specified element.
+ * More formally, returns {@code true} if and only if this queue contains
+ * at least one element {@code e} such that {@code o.equals(e)}.
+ *
+ * @param o object to be checked for containment in this queue
+ * @return {@code true} if this queue contains the specified element
+ */
+ public boolean contains(Object o) {
+ if (o == null) return false;
+ for (Node p = head; p != null; p = succ(p)) {
+ Object item = p.item;
+ if (p.isData) {
+ if (item != null && item != p && o.equals(item))
return true;
- }
- pred = q;
}
+ else if (item == null)
+ break;
}
+ return false;
+ }
+
+ /**
+ * Always returns {@code Integer.MAX_VALUE} because a
+ * {@code LinkedTransferQueue} is not capacity constrained.
+ *
+ * @return {@code Integer.MAX_VALUE} (as specified by
+ * {@link java.util.concurrent.BlockingQueue#remainingCapacity()
+ * BlockingQueue.remainingCapacity})
+ */
+ public int remainingCapacity() {
+ return Integer.MAX_VALUE;
}
/**
- * Save the state to a stream (that is, serialize it).
+ * Saves the state to a stream (that is, serializes it).
*
* @serialData All of the elements (each an {@code E}) in
* the proper order, followed by a null
@@ -763,16 +1282,17 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
}
/**
- * Reconstitute the Queue instance from a stream (that is,
- * deserialize it).
+ * Reconstitutes the Queue instance from a stream (that is,
+ * deserializes it).
+ *
* @param s the stream
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
- resetHeadAndTail();
for (;;) {
- E item = (E)s.readObject();
+ @SuppressWarnings("unchecked")
+ E item = (E) s.readObject();
if (item == null)
break;
else
@@ -780,61 +1300,53 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
}
}
+ // Unsafe mechanics
- // Support for resetting head/tail while deserializing
- private void resetHeadAndTail() {
- QNode dummy = new QNode(null, false);
- _unsafe.putObjectVolatile(this, headOffset,
- new PaddedAtomicReference<QNode>(dummy));
- _unsafe.putObjectVolatile(this, tailOffset,
- new PaddedAtomicReference<QNode>(dummy));
- _unsafe.putObjectVolatile(this, cleanMeOffset,
- new PaddedAtomicReference<QNode>(null));
+ private static final sun.misc.Unsafe UNSAFE;
+ private static final long headOffset;
+ private static final long tailOffset;
+ private static final long sweepVotesOffset;
+ static {
+ try {
+ UNSAFE = getUnsafe();
+ Class<?> k = LinkedTransferQueue.class;
+ headOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("head"));
+ tailOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("tail"));
+ sweepVotesOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("sweepVotes"));
+ } catch (Exception e) {
+ throw new Error(e);
+ }
}
- // Temporary Unsafe mechanics for preliminary release
- private static Unsafe getUnsafe() throws Throwable {
+ /**
+ * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
+ * Replace with a simple call to Unsafe.getUnsafe when integrating
+ * into a jdk.
+ *
+ * @return a sun.misc.Unsafe
+ */
+ static sun.misc.Unsafe getUnsafe() {
try {
- return Unsafe.getUnsafe();
+ return sun.misc.Unsafe.getUnsafe();
} catch (SecurityException se) {
try {
return java.security.AccessController.doPrivileged
- (new java.security.PrivilegedExceptionAction<Unsafe>() {
- public Unsafe run() throws Exception {
- return getUnsafePrivileged();
+ (new java.security
+ .PrivilegedExceptionAction<sun.misc.Unsafe>() {
+ public sun.misc.Unsafe run() throws Exception {
+ java.lang.reflect.Field f = sun.misc
+ .Unsafe.class.getDeclaredField("theUnsafe");
+ f.setAccessible(true);
+ return (sun.misc.Unsafe) f.get(null);
}});
} catch (java.security.PrivilegedActionException e) {
- throw e.getCause();
+ throw new RuntimeException("Could not initialize intrinsics",
+ e.getCause());
}
}
}
- private static Unsafe getUnsafePrivileged()
- throws NoSuchFieldException, IllegalAccessException {
- Field f = Unsafe.class.getDeclaredField("theUnsafe");
- f.setAccessible(true);
- return (Unsafe) f.get(null);
- }
-
- private static long fieldOffset(String fieldName)
- throws NoSuchFieldException {
- return _unsafe.objectFieldOffset
- (LinkedTransferQueue.class.getDeclaredField(fieldName));
- }
-
- private static final Unsafe _unsafe;
- private static final long headOffset;
- private static final long tailOffset;
- private static final long cleanMeOffset;
- static {
- try {
- _unsafe = getUnsafe();
- headOffset = fieldOffset("head");
- tailOffset = fieldOffset("tail");
- cleanMeOffset = fieldOffset("cleanMe");
- } catch (Throwable e) {
- throw new RuntimeException("Could not initialize intrinsics", e);
- }
- }
-
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
index 2d36f7eb33..1e7cdd952d 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
@@ -1,64 +1,73 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
/**
- * Recursive resultless ForkJoinTasks. This class establishes
- * conventions to parameterize resultless actions as <tt>Void</tt>
- * ForkJoinTasks. Because <tt>null</tt> is the only valid value of
- * <tt>Void</tt>, methods such as join always return <tt>null</tt>
- * upon completion.
+ * A recursive resultless {@link ForkJoinTask}. This class
+ * establishes conventions to parameterize resultless actions as
+ * {@code Void} {@code ForkJoinTask}s. Because {@code null} is the
+ * only valid value of type {@code Void}, methods such as {@code join}
+ * always return {@code null} upon completion.
*
- * <p><b>Sample Usages.</b> Here is a sketch of a ForkJoin sort that
- * sorts a given <tt>long[]</tt> array:
+ * <p><b>Sample Usages.</b> Here is a simple but complete ForkJoin
+ * sort that sorts a given {@code long[]} array:
*
- * <pre>
- * class SortTask extends RecursiveAction {
- * final long[] array; final int lo; final int hi;
+ * <pre> {@code
+ * static class SortTask extends RecursiveAction {
+ * final long[] array; final int lo, hi;
* SortTask(long[] array, int lo, int hi) {
* this.array = array; this.lo = lo; this.hi = hi;
* }
+ * SortTask(long[] array) { this(array, 0, array.length); }
* protected void compute() {
- * if (hi - lo &lt; THRESHOLD)
- * sequentiallySort(array, lo, hi);
+ * if (hi - lo < THRESHOLD)
+ * sortSequentially(lo, hi);
* else {
- * int mid = (lo + hi) &gt;&gt;&gt; 1;
+ * int mid = (lo + hi) >>> 1;
* invokeAll(new SortTask(array, lo, mid),
* new SortTask(array, mid, hi));
- * merge(array, lo, hi);
+ * merge(lo, mid, hi);
* }
* }
- * }
- * </pre>
+ * // implementation details follow:
+ * final static int THRESHOLD = 1000;
+ * void sortSequentially(int lo, int hi) {
+ * Arrays.sort(array, lo, hi);
+ * }
+ * void merge(int lo, int mid, int hi) {
+ * long[] buf = Arrays.copyOfRange(array, lo, mid);
+ * for (int i = 0, j = lo, k = mid; i < buf.length; j++)
+ * array[j] = (k == hi || buf[i] < array[k]) ?
+ * buf[i++] : array[k++];
+ * }
+ * }}</pre>
*
- * You could then sort anArray by creating <tt>new SortTask(anArray, 0,
- * anArray.length-1) </tt> and invoking it in a ForkJoinPool.
- * As a more concrete simple example, the following task increments
- * each element of an array:
- * <pre>
+ * You could then sort {@code anArray} by creating {@code new
+ * SortTask(anArray)} and invoking it in a ForkJoinPool. As a more
+ * concrete simple example, the following task increments each element
+ * of an array:
+ * <pre> {@code
* class IncrementTask extends RecursiveAction {
- * final long[] array; final int lo; final int hi;
+ * final long[] array; final int lo, hi;
* IncrementTask(long[] array, int lo, int hi) {
* this.array = array; this.lo = lo; this.hi = hi;
* }
* protected void compute() {
- * if (hi - lo &lt; THRESHOLD) {
- * for (int i = lo; i &lt; hi; ++i)
+ * if (hi - lo < THRESHOLD) {
+ * for (int i = lo; i < hi; ++i)
* array[i]++;
* }
* else {
- * int mid = (lo + hi) &gt;&gt;&gt; 1;
+ * int mid = (lo + hi) >>> 1;
* invokeAll(new IncrementTask(array, lo, mid),
* new IncrementTask(array, mid, hi));
* }
* }
- * }
- * </pre>
- *
+ * }}</pre>
*
* <p>The following example illustrates some refinements and idioms
* that may lead to better performance: RecursiveActions need not be
@@ -66,33 +75,33 @@ package scala.concurrent.forkjoin;
* divide-and-conquer approach. Here is a class that sums the squares
* of each element of a double array, by subdividing out only the
* right-hand-sides of repeated divisions by two, and keeping track of
- * them with a chain of <tt>next</tt> references. It uses a dynamic
- * threshold based on method <tt>surplus</tt>, but counterbalances
- * potential excess partitioning by directly performing leaf actions
- * on unstolen tasks rather than further subdividing.
+ * them with a chain of {@code next} references. It uses a dynamic
+ * threshold based on method {@code getSurplusQueuedTaskCount}, but
+ * counterbalances potential excess partitioning by directly
+ * performing leaf actions on unstolen tasks rather than further
+ * subdividing.
*
- * <pre>
+ * <pre> {@code
* double sumOfSquares(ForkJoinPool pool, double[] array) {
* int n = array.length;
- * int seqSize = 1 + n / (8 * pool.getParallelism());
- * Applyer a = new Applyer(array, 0, n, seqSize, null);
+ * Applyer a = new Applyer(array, 0, n, null);
* pool.invoke(a);
* return a.result;
* }
*
* class Applyer extends RecursiveAction {
* final double[] array;
- * final int lo, hi, seqSize;
+ * final int lo, hi;
* double result;
* Applyer next; // keeps track of right-hand-side tasks
- * Applyer(double[] array, int lo, int hi, int seqSize, Applyer next) {
+ * Applyer(double[] array, int lo, int hi, Applyer next) {
* this.array = array; this.lo = lo; this.hi = hi;
- * this.seqSize = seqSize; this.next = next;
+ * this.next = next;
* }
*
- * double atLeaf(int l, int r) {
+ * double atLeaf(int l, int h) {
* double sum = 0;
- * for (int i = l; i &lt; h; ++i) // perform leftmost base step
+ * for (int i = l; i < h; ++i) // perform leftmost base step
* sum += array[i] * array[i];
* return sum;
* }
@@ -101,10 +110,9 @@ package scala.concurrent.forkjoin;
* int l = lo;
* int h = hi;
* Applyer right = null;
- * while (h - l &gt; 1 &amp;&amp;
- * ForkJoinWorkerThread.getEstimatedSurplusTaskCount() &lt;= 3) {
- * int mid = (l + h) &gt;&gt;&gt; 1;
- * right = new Applyer(array, mid, h, seqSize, right);
+ * while (h - l > 1 && getSurplusQueuedTaskCount() <= 3) {
+ * int mid = (l + h) >>> 1;
+ * right = new Applyer(array, mid, h, right);
* right.fork();
* h = mid;
* }
@@ -113,17 +121,20 @@ package scala.concurrent.forkjoin;
* if (right.tryUnfork()) // directly calculate if not stolen
* sum += right.atLeaf(right.lo, right.hi);
* else {
- * right.helpJoin();
+ * right.join();
* sum += right.result;
* }
* right = right.next;
* }
* result = sum;
* }
- * }
- * </pre>
+ * }}</pre>
+ *
+ * @since 1.7
+ * @author Doug Lea
*/
public abstract class RecursiveAction extends ForkJoinTask<Void> {
+ private static final long serialVersionUID = 5232453952276485070L;
/**
* The main computation performed by this task.
@@ -131,7 +142,9 @@ public abstract class RecursiveAction extends ForkJoinTask<Void> {
protected abstract void compute();
/**
- * Always returns null
+ * Always returns {@code null}.
+ *
+ * @return {@code null} always
*/
public final Void getRawResult() { return null; }
@@ -141,7 +154,7 @@ public abstract class RecursiveAction extends ForkJoinTask<Void> {
protected final void setRawResult(Void mustBeNull) { }
/**
- * Implements execution conventions for RecursiveActions
+ * Implements execution conventions for RecursiveActions.
*/
protected final boolean exec() {
compute();
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
index a526f75597..d1e1547143 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
@@ -1,29 +1,29 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
/**
- * Recursive result-bearing ForkJoinTasks.
- * <p> For a classic example, here is a task computing Fibonacci numbers:
+ * A recursive result-bearing {@link ForkJoinTask}.
*
- * <pre>
- * class Fibonacci extends RecursiveTask&lt;Integer&gt; {
+ * <p>For a classic example, here is a task computing Fibonacci numbers:
+ *
+ * <pre> {@code
+ * class Fibonacci extends RecursiveTask<Integer> {
* final int n;
- * Fibonnaci(int n) { this.n = n; }
+ * Fibonacci(int n) { this.n = n; }
* Integer compute() {
- * if (n &lt;= 1)
+ * if (n <= 1)
* return n;
* Fibonacci f1 = new Fibonacci(n - 1);
* f1.fork();
* Fibonacci f2 = new Fibonacci(n - 2);
* return f2.compute() + f1.join();
* }
- * }
- * </pre>
+ * }}</pre>
*
* However, besides being a dumb way to compute Fibonacci functions
* (there is a simple fast linear algorithm that you'd use in
@@ -33,17 +33,14 @@ package scala.concurrent.forkjoin;
* minimum granularity size (for example 10 here) for which you always
* sequentially solve rather than subdividing.
*
+ * @since 1.7
+ * @author Doug Lea
*/
public abstract class RecursiveTask<V> extends ForkJoinTask<V> {
+ private static final long serialVersionUID = 5232453952276485270L;
/**
- * Empty constructor for use by subclasses.
- */
- protected RecursiveTask() {
- }
-
- /**
- * The result returned by compute method.
+ * The result of the computation.
*/
V result;
@@ -61,7 +58,7 @@ public abstract class RecursiveTask<V> extends ForkJoinTask<V> {
}
/**
- * Implements execution conventions for RecursiveTask
+ * Implements execution conventions for RecursiveTask.
*/
protected final boolean exec() {
result = compute();
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
index 34e2e37f37..19237c9092 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
@@ -1,49 +1,53 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
-import java.util.*;
+
+import java.util.Random;
/**
- * A random number generator with the same properties as class {@link
- * Random} but isolated to the current Thread. Like the global
- * generator used by the {@link java.lang.Math} class, a
- * ThreadLocalRandom is initialized with an internally generated seed
- * that may not otherwise be modified. When applicable, use of
- * ThreadLocalRandom rather than shared Random objects in concurrent
- * programs will typically encounter much less overhead and
- * contention. ThreadLocalRandoms are particularly appropriate when
- * multiple tasks (for example, each a {@link ForkJoinTask}), use
- * random numbers in parallel in thread pools.
+ * A random number generator isolated to the current thread. Like the
+ * global {@link java.util.Random} generator used by the {@link
+ * java.lang.Math} class, a {@code ThreadLocalRandom} is initialized
+ * with an internally generated seed that may not otherwise be
+ * modified. When applicable, use of {@code ThreadLocalRandom} rather
+ * than shared {@code Random} objects in concurrent programs will
+ * typically encounter much less overhead and contention. Use of
+ * {@code ThreadLocalRandom} is particularly appropriate when multiple
+ * tasks (for example, each a {@link ForkJoinTask}) use random numbers
+ * in parallel in thread pools.
*
* <p>Usages of this class should typically be of the form:
- * <code>ThreadLocalRandom.current().nextX(...)</code> (where
- * <code>X</code> is <code>Int</code>, <code>Long</code>, etc).
+ * {@code ThreadLocalRandom.current().nextX(...)} (where
+ * {@code X} is {@code Int}, {@code Long}, etc).
* When all usages are of this form, it is never possible to
- * accidently share ThreadLocalRandoms across multiple threads.
+ * accidentally share a {@code ThreadLocalRandom} across multiple threads.
*
* <p>This class also provides additional commonly used bounded random
* generation methods.
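+ *
+ * <p>For example (an illustrative sketch):
+ * <pre> {@code
+ * int roll = ThreadLocalRandom.current().nextInt(1, 7); // uniform in 1..6
+ * }</pre>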
+ *
+ * @since 1.7
+ * @author Doug Lea
*/
public class ThreadLocalRandom extends Random {
// same constants as Random, but must be redeclared because private
- private final static long multiplier = 0x5DEECE66DL;
- private final static long addend = 0xBL;
- private final static long mask = (1L << 48) - 1;
+ private static final long multiplier = 0x5DEECE66DL;
+ private static final long addend = 0xBL;
+ private static final long mask = (1L << 48) - 1;
/**
- * The random seed. We can't use super.seed
+ * The random seed. We can't use super.seed.
*/
private long rnd;
/**
- * Initialization flag to permit the first and only allowed call
- * to setSeed (inside Random constructor) to succeed. We can't
- * allow others since it would cause setting seed in one part of a
- * program to unintentionally impact other usages by the thread.
+ * Initialization flag to permit calls to setSeed to succeed only
+ * while executing the Random constructor. We can't allow others
+ * since it would cause setting seed in one part of a program to
+ * unintentionally impact other usages by the thread.
*/
boolean initialized;
@@ -65,40 +69,42 @@ public class ThreadLocalRandom extends Random {
/**
* Constructor called only by localRandom.initialValue.
- * We rely on the fact that the superclass no-arg constructor
- * invokes setSeed exactly once to initialize.
*/
ThreadLocalRandom() {
super();
+ initialized = true;
}
/**
- * Returns the current Thread's ThreadLocalRandom
- * @return the current Thread's ThreadLocalRandom
+ * Returns the current thread's {@code ThreadLocalRandom}.
+ *
+ * @return the current thread's {@code ThreadLocalRandom}
*/
public static ThreadLocalRandom current() {
return localRandom.get();
}
/**
- * Throws UnsupportedOperationException. Setting seeds in this
- * generator is unsupported.
+ * Throws {@code UnsupportedOperationException}. Setting seeds in
+ * this generator is not supported.
+ *
* @throws UnsupportedOperationException always
*/
public void setSeed(long seed) {
if (initialized)
throw new UnsupportedOperationException();
- initialized = true;
rnd = (seed ^ multiplier) & mask;
}
protected int next(int bits) {
- return (int)((rnd = (rnd * multiplier + addend) & mask) >>> (48-bits));
+ rnd = (rnd * multiplier + addend) & mask;
+ return (int) (rnd >>> (48-bits));
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
+ *
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @throws IllegalArgumentException if least greater than or equal
@@ -113,7 +119,8 @@ public class ThreadLocalRandom extends Random {
/**
* Returns a pseudorandom, uniformly distributed value
- * between 0 (inclusive) and the specified value (exclusive)
+ * between 0 (inclusive) and the specified value (exclusive).
+ *
* @param n the bound on the random number to be returned. Must be
* positive.
* @return the next value
@@ -131,17 +138,18 @@ public class ThreadLocalRandom extends Random {
while (n >= Integer.MAX_VALUE) {
int bits = next(2);
long half = n >>> 1;
- long nextn = ((bits & 2) == 0)? half : n - half;
+ long nextn = ((bits & 2) == 0) ? half : n - half;
if ((bits & 1) == 0)
offset += n - nextn;
n = nextn;
}
- return offset + nextInt((int)n);
+ return offset + nextInt((int) n);
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
+ *
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @return the next value
@@ -156,7 +164,8 @@ public class ThreadLocalRandom extends Random {
/**
* Returns a pseudorandom, uniformly distributed {@code double} value
- * between 0 (inclusive) and the specified value (exclusive)
+ * between 0 (inclusive) and the specified value (exclusive).
+ *
* @param n the bound on the random number to be returned. Must be
* positive.
* @return the next value
@@ -171,6 +180,7 @@ public class ThreadLocalRandom extends Random {
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
+ *
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @return the next value
@@ -183,4 +193,5 @@ public class ThreadLocalRandom extends Random {
return nextDouble() * (bound - least) + least;
}
+ private static final long serialVersionUID = -5851777807851030925L;
}
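The recommended ThreadLocalRandom.current().nextX(...) form from the class comment, as a quick sketch (values illustrative):

    import scala.concurrent.forkjoin.ThreadLocalRandom

    val die  = ThreadLocalRandom.current().nextInt(1, 7)  // uniform in [1, 7)
    val coin = ThreadLocalRandom.current().nextBoolean()

Because current() is fetched at each call site, the generator is never stored anywhere it could leak to another thread.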
diff --git a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
index 9c7b2289c4..7d149c7ae5 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
@@ -1,7 +1,7 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
@@ -11,21 +11,23 @@ import java.util.concurrent.*;
* A {@link BlockingQueue} in which producers may wait for consumers
* to receive elements. A {@code TransferQueue} may be useful for
* example in message passing applications in which producers
- * sometimes (using method {@code transfer}) await receipt of
- * elements by consumers invoking {@code take} or {@code poll},
- * while at other times enqueue elements (via method {@code put})
- * without waiting for receipt. Non-blocking and time-out versions of
- * {@code tryTransfer} are also available. A TransferQueue may also
- * be queried via {@code hasWaitingConsumer} whether there are any
- * threads waiting for items, which is a converse analogy to a
- * {@code peek} operation.
+ * sometimes (using method {@link #transfer}) await receipt of
+ * elements by consumers invoking {@code take} or {@code poll}, while
+ * at other times enqueue elements (via method {@code put}) without
+ * waiting for receipt.
+ * {@linkplain #tryTransfer(Object) Non-blocking} and
+ * {@linkplain #tryTransfer(Object,long,TimeUnit) time-out} versions of
+ * {@code tryTransfer} are also available.
+ * A {@code TransferQueue} may also be queried, via {@link
+ * #hasWaitingConsumer}, whether there are any threads waiting for
+ * items, which is a converse analogy to a {@code peek} operation.
*
- * <p>Like any {@code BlockingQueue}, a {@code TransferQueue} may be
- * capacity bounded. If so, an attempted {@code transfer} operation
- * may initially block waiting for available space, and/or
- * subsequently block waiting for reception by a consumer. Note that
- * in a queue with zero capacity, such as {@link SynchronousQueue},
- * {@code put} and {@code transfer} are effectively synonymous.
+ * <p>Like other blocking queues, a {@code TransferQueue} may be
+ * capacity bounded. If so, an attempted transfer operation may
+ * initially block waiting for available space, and/or subsequently
+ * block waiting for reception by a consumer. Note that in a queue
+ * with zero capacity, such as {@link SynchronousQueue}, {@code put}
+ * and {@code transfer} are effectively synonymous.
*
* <p>This interface is a member of the
* <a href="{@docRoot}/../technotes/guides/collections/index.html">
@@ -37,9 +39,12 @@ import java.util.concurrent.*;
*/
public interface TransferQueue<E> extends BlockingQueue<E> {
/**
- * Transfers the specified element if there exists a consumer
- * already waiting to receive it, otherwise returning {@code false}
- * without enqueuing the element.
+ * Transfers the element to a waiting consumer immediately, if possible.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * otherwise returning {@code false} without enqueuing the element.
*
* @param e the element to transfer
* @return {@code true} if the element was transferred, else
@@ -53,13 +58,16 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
boolean tryTransfer(E e);
/**
- * Inserts the specified element into this queue, waiting if
- * necessary for space to become available and the element to be
- * dequeued by a consumer invoking {@code take} or {@code poll}.
+ * Transfers the element to a consumer, waiting if necessary to do so.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * else waits until the element is received by a consumer.
*
* @param e the element to transfer
* @throws InterruptedException if interrupted while waiting,
- * in which case the element is not enqueued.
+ * in which case the element is not left enqueued
* @throws ClassCastException if the class of the specified element
* prevents it from being added to this queue
* @throws NullPointerException if the specified element is null
@@ -69,10 +77,15 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
void transfer(E e) throws InterruptedException;
/**
- * Inserts the specified element into this queue, waiting up to
- * the specified wait time if necessary for space to become
- * available and the element to be dequeued by a consumer invoking
- * {@code take} or {@code poll}.
+ * Transfers the element to a consumer if it is possible to do so
+ * before the timeout elapses.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * else waits until the element is received by a consumer,
+ * returning {@code false} if the specified wait time elapses
+ * before the element can be transferred.
*
* @param e the element to transfer
* @param timeout how long to wait before giving up, in units of
@@ -81,9 +94,9 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
* {@code timeout} parameter
* @return {@code true} if successful, or {@code false} if
* the specified waiting time elapses before completion,
- * in which case the element is not enqueued.
+ * in which case the element is not left enqueued
* @throws InterruptedException if interrupted while waiting,
- * in which case the element is not enqueued.
+ * in which case the element is not left enqueued
* @throws ClassCastException if the class of the specified element
* prevents it from being added to this queue
* @throws NullPointerException if the specified element is null
@@ -95,7 +108,8 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
/**
* Returns {@code true} if there is at least one consumer waiting
- * to dequeue an element via {@code take} or {@code poll}.
+ * to receive an element via {@link #take} or
+ * timed {@link #poll(long,TimeUnit) poll}.
* The return value represents a momentary state of affairs.
*
* @return {@code true} if there is at least one waiting consumer
@@ -104,15 +118,16 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
/**
* Returns an estimate of the number of consumers waiting to
- * dequeue elements via {@code take} or {@code poll}. The return
- * value is an approximation of a momentary state of affairs, that
- * may be inaccurate if consumers have completed or given up
- * waiting. The value may be useful for monitoring and heuristics,
- * but not for synchronization control. Implementations of this
+ * receive elements via {@link #take} or timed
+ * {@link #poll(long,TimeUnit) poll}. The return value is an
+ * approximation of a momentary state of affairs, that may be
+ * inaccurate if consumers have completed or given up waiting.
+ * The value may be useful for monitoring and heuristics, but
+ * not for synchronization control. Implementations of this
* method are likely to be noticeably slower than those for
* {@link #hasWaitingConsumer}.
*
- * @return the number of consumers waiting to dequeue elements
+ * @return the number of consumers waiting to receive elements
*/
int getWaitingConsumerCount();
}
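To see the transfer/take handshake in action, a minimal sketch; it assumes the LinkedTransferQueue implementation that accompanies this interface in the same package:

    import scala.concurrent.forkjoin.LinkedTransferQueue

    val q = new LinkedTransferQueue[String]()
    val consumer = new Thread(new Runnable {
      def run() { println("received: " + q.take()) }
    })
    consumer.start()
    q.transfer("hello")  // blocks until the consumer has received the element
    consumer.join()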
diff --git a/src/forkjoin/scala/concurrent/forkjoin/package-info.java b/src/forkjoin/scala/concurrent/forkjoin/package-info.java
index b8fa0fad02..3561b9b44a 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/package-info.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/package-info.java
@@ -1,7 +1,7 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
@@ -15,7 +15,7 @@
* Threads. However, when applicable, they typically provide
* significantly greater performance on multiprocessor platforms.
*
- * <p> Candidates for fork/join processing mainly include those that
+ * <p>Candidates for fork/join processing mainly include those that
* can be expressed using parallel divide-and-conquer techniques: To
* solve a problem, break it in two (or more) parts, and then solve
* those parts in parallel, continuing on in this way until the
@@ -24,6 +24,5 @@
* available to other threads (normally one per CPU), that help
* complete the tasks. In general, the most efficient ForkJoinTasks
* are those that directly implement this algorithmic design pattern.
- *
*/
package scala.concurrent.forkjoin;
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt b/src/library/rootdoc.txt
index 6145429f1e..6145429f1e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt
+++ b/src/library/rootdoc.txt
diff --git a/src/library/scala/AnyValCompanion.scala b/src/library/scala/AnyValCompanion.scala
index d6cb498185..47555938a0 100644
--- a/src/library/scala/AnyValCompanion.scala
+++ b/src/library/scala/AnyValCompanion.scala
@@ -18,4 +18,4 @@ package scala
* }}}
*
*/
-private[scala] trait AnyValCompanion extends SpecializableCompanion { }
+private[scala] trait AnyValCompanion extends Specializable { }
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 3d85f2f52f..80571943e5 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -55,7 +55,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
thisenum =>
def this() = this(0)
-
+
@deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
def this(initial: Int, names: String*) = {
this(initial)
@@ -201,7 +201,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
case _ => false
}
override def hashCode: Int = id.##
-
+
/** Create a ValueSet which contains this value and another one */
def + (v: Value) = ValueSet(this, v)
}
@@ -266,7 +266,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
* new array of longs */
def toBitMask: Array[Long] = nnIds.toBitMask
}
-
+
/** A factory object for value sets */
object ValueSet {
import generic.CanBuildFrom
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index f68bbcc454..dceed26439 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Sun Jul 31 00:37:30 CEST 2011
+// genprod generated these sources at: Tue Feb 14 16:49:03 PST 2012
package scala
@@ -17,7 +17,7 @@ package scala
* shorthand for the anonymous class definition anonfun0:
*
* {{{
- * object Main extends Application {
+ * object Main extends App {
* val javaVersion = () => sys.props("java.version")
*
* val anonfun0 = new Function0[String] {
@@ -26,6 +26,12 @@ package scala
* assert(javaVersion() == anonfun0())
* }
* }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+ *
*/
trait Function0[@specialized +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 7517e6604b..8995ef912b 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -16,7 +16,7 @@ package scala
* shorthand for the anonymous class definition anonfun1:
*
* {{{
- * object Main extends Application {
+ * object Main extends App {
* val succ = (x: Int) => x + 1
* val anonfun1 = new Function1[Int, Int] {
* def apply(x: Int): Int = x + 1
@@ -29,13 +29,11 @@ package scala
* be suggested by the existence of [[scala.PartialFunction]]. The only
* distinction between `Function1` and `PartialFunction` is that the
* latter can specify inputs which it will not handle.
*
*/
@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
-trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
- /** Apply the body of this function to the argument. It may throw an
- * exception.
- *
+trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends AnyRef { self =>
+ /** Apply the body of this function to the argument.
* @return the result of function application.
*/
def apply(v1: T1): R
diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala
index 6f17606afd..9e107fc53d 100644
--- a/src/library/scala/Function10.scala
+++ b/src/library/scala/Function10.scala
@@ -18,12 +18,10 @@ trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)).curried
}
diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala
index 7a73bd35bf..783a86ab5d 100644
--- a/src/library/scala/Function11.scala
+++ b/src/library/scala/Function11.scala
@@ -18,12 +18,10 @@ trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] ex
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)).curried
}
diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala
index c099c0436a..7f4dee6216 100644
--- a/src/library/scala/Function12.scala
+++ b/src/library/scala/Function12.scala
@@ -18,12 +18,10 @@ trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)).curried
}
diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala
index f13db28f30..23853dde69 100644
--- a/src/library/scala/Function13.scala
+++ b/src/library/scala/Function13.scala
@@ -18,12 +18,10 @@ trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)).curried
}
diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala
index d0345cc552..372f1cfafb 100644
--- a/src/library/scala/Function14.scala
+++ b/src/library/scala/Function14.scala
@@ -18,12 +18,10 @@ trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)).curried
}
diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala
index 69ff039f5b..47c7309695 100644
--- a/src/library/scala/Function15.scala
+++ b/src/library/scala/Function15.scala
@@ -18,12 +18,10 @@ trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)).curried
}
diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala
index d544d89303..8eea42de5b 100644
--- a/src/library/scala/Function16.scala
+++ b/src/library/scala/Function16.scala
@@ -18,12 +18,10 @@ trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)).curried
}
diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala
index 16c71e7ada..2d93af34f2 100644
--- a/src/library/scala/Function17.scala
+++ b/src/library/scala/Function17.scala
@@ -18,12 +18,10 @@ trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)).curried
}
diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala
index dfd70c2353..ffca98c443 100644
--- a/src/library/scala/Function18.scala
+++ b/src/library/scala/Function18.scala
@@ -18,12 +18,10 @@ trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)).curried
}
diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala
index 63decd03ad..f661ea7707 100644
--- a/src/library/scala/Function19.scala
+++ b/src/library/scala/Function19.scala
@@ -18,12 +18,10 @@ trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)).curried
}
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index a4ad87fa97..cacb96ef5d 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -16,7 +16,7 @@ package scala
* shorthand for the anonymous class definition anonfun2:
*
* {{{
- * object Main extends Application {
+ * object Main extends App {
* val max = (x: Int, y: Int) => if (x < y) y else x
*
* val anonfun2 = new Function2[Int, Int, Int] {
@@ -25,18 +25,22 @@ package scala
* assert(max(0, 1) == anonfun2(0, 1))
* }
* }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+ *
*/
trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
* @return the result of function application.
*/
def apply(v1: T1, v2: T2): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2) == apply(x1, x2)`
*/
def curried: T1 => T2 => R = {
(x1: T1) => (x2: T2) => apply(x1, x2)
}
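For reference, what the documented contract for `curried` means in use (a sketch, not part of the patch):

    val max = (x: Int, y: Int) => if (x < y) y else x
    val curriedMax = max.curried       // Int => Int => Int
    val atLeast0   = curriedMax(0)     // partial application
    assert(curriedMax(1)(2) == max(1, 2) && atLeast0(-3) == 0)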
diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala
index 7219c9be81..e4fb9f280c 100644
--- a/src/library/scala/Function20.scala
+++ b/src/library/scala/Function20.scala
@@ -18,12 +18,10 @@ trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)).curried
}
diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala
index c7d55960db..9823386856 100644
--- a/src/library/scala/Function21.scala
+++ b/src/library/scala/Function21.scala
@@ -18,12 +18,10 @@ trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)).curried
}
diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala
index 196421c830..e708f7f49a 100644
--- a/src/library/scala/Function22.scala
+++ b/src/library/scala/Function22.scala
@@ -18,12 +18,10 @@ trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)(x22) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21, x22: T22) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)).curried
}
diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala
index 09a5aa5828..62a997c1b5 100644
--- a/src/library/scala/Function3.scala
+++ b/src/library/scala/Function3.scala
@@ -18,12 +18,10 @@ trait Function3[-T1, -T2, -T3, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3) == apply(x1, x2, x3)`
*/
def curried: T1 => T2 => T3 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => apply(x1, x2, x3)
}
diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala
index 00da84636a..86d2faeac8 100644
--- a/src/library/scala/Function4.scala
+++ b/src/library/scala/Function4.scala
@@ -18,12 +18,10 @@ trait Function4[-T1, -T2, -T3, -T4, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4) == apply(x1, x2, x3, x4)`
*/
def curried: T1 => T2 => T3 => T4 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1, x2, x3, x4)
}
diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala
index 3915048906..bd9af77f12 100644
--- a/src/library/scala/Function5.scala
+++ b/src/library/scala/Function5.scala
@@ -18,12 +18,10 @@ trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5) == apply(x1, x2, x3, x4, x5)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1, x2, x3, x4, x5)).curried
}
diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala
index 183a7332e1..4f601a468c 100644
--- a/src/library/scala/Function6.scala
+++ b/src/library/scala/Function6.scala
@@ -18,12 +18,10 @@ trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6) == apply(x1, x2, x3, x4, x5, x6)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6) => self.apply(x1, x2, x3, x4, x5, x6)).curried
}
diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala
index 10f8e9b599..6978b6545d 100644
--- a/src/library/scala/Function7.scala
+++ b/src/library/scala/Function7.scala
@@ -18,12 +18,10 @@ trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7) == apply(x1, x2, x3, x4, x5, x6, x7)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1, x2, x3, x4, x5, x6, x7)).curried
}
diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala
index 8144b36101..903551d939 100644
--- a/src/library/scala/Function8.scala
+++ b/src/library/scala/Function8.scala
@@ -18,12 +18,10 @@ trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends AnyRef { sel
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8) == apply(x1, x2, x3, x4, x5, x6, x7, x8)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8)).curried
}
diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala
index ee04ed0915..0c273ba929 100644
--- a/src/library/scala/Function9.scala
+++ b/src/library/scala/Function9.scala
@@ -18,12 +18,10 @@ trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends AnyRef
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)`
*/
def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)).curried
}
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 70caff0221..3c5d6d0d23 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -26,18 +26,18 @@ package scala
*
* {{{
* val sample = 1 to 10
- * val isEven: PartialFunction[Int, String] = {
- * case x if x % 2 == 0 => x+" is even"
+ * val isEven: PartialFunction[Int, String] = {
+ * case x if x % 2 == 0 => x+" is even"
* }
*
* // the method collect can use isDefinedAt to select which members to collect
* val evenNumbers = sample collect isEven
*
- * val isOdd: PartialFunction[Int, String] = {
- * case x if x % 2 == 1 => x+" is odd"
+ * val isOdd: PartialFunction[Int, String] = {
+ * case x if x % 2 == 1 => x+" is odd"
* }
*
- * // the method orElse allows chaining another partial function to handle
+ * // the method orElse allows chaining another partial function to handle
* // input outside the declared domain
* val numbers = sample map (isEven orElse isOdd)
* }}}
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 824e048e73..a2ee76500c 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -95,7 +95,8 @@ object Predef extends LowPriorityImplicits {
type Set[A] = immutable.Set[A]
val Map = immutable.Map
val Set = immutable.Set
- val AnyRef = new SpecializableCompanion {} // a dummy used by the specialization annotation
+ // @deprecated("Use scala.AnyRef instead", "2.10.0")
+ // def AnyRef = scala.AnyRef
// Manifest types, companions, and incantations for summoning
type ClassManifest[T] = scala.reflect.ClassManifest[T]
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
new file mode 100644
index 0000000000..67126b3069
--- /dev/null
+++ b/src/library/scala/Specializable.scala
@@ -0,0 +1,29 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** A common supertype for companions of specializable types.
+ * Should not be extended in user code.
+ */
+trait Specializable extends SpecializableCompanion
+
+object Specializable {
+ // No type parameter in @specialized annotation.
+ trait SpecializedGroup { }
+
+ // Smuggle a list of types by way of a tuple upon which Group is parameterized.
+ class Group[T >: Null](value: T) extends SpecializedGroup { }
+
+ final val Primitives = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)
+ final val Everything = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)
+ final val Bits32AndUp = new Group(Int, Long, Float, Double)
+ final val Integral = new Group(Byte, Short, Int, Long, Char)
+ final val AllNumeric = new Group(Byte, Short, Int, Long, Char, Float, Double)
+ final val BestOfBreed = new Group(Int, Double, Boolean, Unit, AnyRef)
+}
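These groups are meant to be handed to @specialized in place of an explicit list of types; whether a compiler accepts a group where a type list is expected depends on the accompanying compiler changes, so treat this as a sketch of the intended use:

    // Equivalent in intent to listing Byte, Short, Int, Long, Char,
    // Float, Double, Boolean and Unit individually.
    class Box[@specialized(Specializable.Primitives) T](val value: T)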
diff --git a/src/library/scala/SpecializableCompanion.scala b/src/library/scala/SpecializableCompanion.scala
index fbdf42fd0b..ec797c1f15 100644
--- a/src/library/scala/SpecializableCompanion.scala
+++ b/src/library/scala/SpecializableCompanion.scala
@@ -10,4 +10,5 @@ package scala
/** A common supertype for companion classes which specialization takes into account.
*/
+@deprecated("Use Specializable instead", "2.10.0")
private[scala] trait SpecializableCompanion
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 6116547aa2..1b01355108 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -13,7 +13,7 @@ import collection.mutable.ArrayBuffer
/** A class to support string interpolation.
* This class supports string interpolation as outlined in Scala SIP-11.
* It needs to be fully documented once the SIP is accepted.
- *
+ *
* @param parts The parts that make up the interpolated string,
* without the expressions that get inserted by interpolation.
*/
@@ -26,13 +26,13 @@ case class StringContext(parts: String*) {
* @param `args` The arguments to be checked.
* @throws An `IllegalArgumentException` if this is not the case.
*/
- def checkLengths(args: Any*): Unit =
+ def checkLengths(args: Any*): Unit =
if (parts.length != args.length + 1)
throw new IllegalArgumentException("wrong number of arguments for interpolated string")
/** The simple string interpolator.
- *
+ *
* It inserts its arguments between corresponding parts of the string context.
* It also treats standard escape sequences as defined in the Scala specification.
* @param `args` The arguments to be inserted into the resulting string.
@@ -46,30 +46,30 @@ case class StringContext(parts: String*) {
checkLengths(args: _*)
val pi = parts.iterator
val ai = args.iterator
- val bldr = new java.lang.StringBuilder(treatEscapes(pi.next))
+ val bldr = new java.lang.StringBuilder(treatEscapes(pi.next()))
while (ai.hasNext) {
bldr append ai.next
- bldr append treatEscapes(pi.next)
+ bldr append treatEscapes(pi.next())
}
bldr.toString
}
/** The formatted string interpolator.
- *
+ *
* It inserts its arguments between corresponding parts of the string context.
* It also treats standard escape sequences as defined in the Scala specification.
* Finally, if an interpolated expression is followed by a `parts` string
* that starts with a formatting specifier, the expression is formatted according to that
* specifier. All specifiers allowed in Java format strings are handled, and in the same
* way they are treated in Java.
- *
+ *
* @param `args` The arguments to be inserted into the resulting string.
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
* @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
- *
+ *
* Note: The `f` method works by assembling a format string from all the `parts` strings and using
* `java.lang.String.format` to format all arguments with that format string. The format string is
* obtained by concatenating all `parts` strings, and performing two transformations:
@@ -89,11 +89,12 @@ case class StringContext(parts: String*) {
val bldr = new java.lang.StringBuilder
val args1 = new ArrayBuffer[Any]
def copyString(first: Boolean): Unit = {
- val str = treatEscapes(pi.next)
+ val str = treatEscapes(pi.next())
+ val strIsEmpty = str.length == 0
var start = 0
var idx = 0
if (!first) {
- if ((str charAt 0) != '%')
+ if (strIsEmpty || (str charAt 0) != '%')
bldr append "%s"
idx = 1
}
@@ -106,11 +107,11 @@ case class StringContext(parts: String*) {
}
idx += 1
}
- bldr append (str substring (start, idx))
+ if (!strIsEmpty) bldr append (str substring (start, idx))
}
copyString(first = true)
while (pi.hasNext) {
- args1 += ai.next
+ args1 += ai.next()
copyString(first = false)
}
bldr.toString format (args1: _*)
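For orientation: under SIP-11 the compiler desugars interpolated literals into calls on StringContext, so the two methods above are reached as in this sketch (values illustrative):

    val name = "James"; val height = 1.9d
    s"Hello, $name"                         // new StringContext("Hello, ", "").s(name)
    f"$name%s is $height%2.2f meters tall"  // routed through f above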
@@ -125,14 +126,14 @@ object StringContext {
* @param idx The index of the offending backslash character in `str`.
*/
class InvalidEscapeException(str: String, idx: Int)
- extends IllegalArgumentException("invalid escape character at index "+idx+" in \""+str+"\"")
+ extends IllegalArgumentException("invalid escape character at index "+idx+" in \""+str+"\"")
/** Expands standard Scala escape sequences in a string.
* Escape sequences are:
* control: `\b`, `\t`, `\n`, `\f`, `\r`
* escape: `\\`, `\"`, `\'`
* octal: `\d` `\dd` `\ddd` where `d` is an octal digit between `0` and `7`.
- *
+ *
* @param A string that may contain escape sequences
* @return The string with all escape sequences expanded.
*/
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index dd6ac0cfd2..684d2266e8 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -19,7 +19,7 @@ import scala.collection.generic.{ CanBuildFrom => CBF }
* @param _1 Element 1 of this Tuple2
* @param _2 Element 2 of this Tuple2
*/
-case class Tuple2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2](_1: T1, _2: T2)
+case class Tuple2[@specialized(Int, Long, Double, Char, Boolean, AnyRef) +T1, @specialized(Int, Long, Double, Char, Boolean, AnyRef) +T2](_1: T1, _2: T2)
extends Product2[T1, T2]
{
override def toString() = "(" + _1 + "," + _2 + ")"
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index 8dc180d7ab..880b645daa 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -10,22 +10,53 @@ package scala.annotation
import java.util.logging.Level
-/** An annotation for methods for which invocations might
- * be removed in the generated code.
+/** An annotation for methods whose bodies may be excluded
+ * from compiler-generated bytecode.
*
* Behavior is influenced by passing `-Xelide-below <arg>` to `scalac`.
- * Methods marked elidable will be omitted from generated code if the
- * priority given the annotation is lower than to the command line argument.
- * Examples:
- * {{{
- * import annotation.elidable._
+ * Calls to methods marked elidable (as well as the method body) will
+ * be omitted from generated code if the priority given the annotation
+ * is lower than that given on the command line.
*
- * @elidable(WARNING) def foo = log("foo")
- * @elidable(FINE) def bar = log("bar")
+ * @elidable(123) // annotation priority
+ * scalac -Xelide-below 456 // command line priority
*
- * scalac -Xelide-below=1000
- * }}}
- * @since 2.8
+ * The method call will be replaced with an expression which depends on
+ * the type of the elided expression. In decreasing order of precedence:
+ *
+ * Unit ()
+ * Boolean false
+ * T <: AnyVal 0
+ * T >: Null null
+ * T >: Nothing Predef.???
+ *
+ * Complete example:
+ {{{
+ import annotation._, elidable._
+ object Test extends App {
+ def expensiveComputation(): Int = { Thread.sleep(1000) ; 172 }
+
+ @elidable(WARNING) def warning(msg: String) = println(msg)
+ @elidable(FINE) def debug(msg: String) = println(msg)
+ @elidable(FINE) def computedValue = expensiveComputation()
+
+ warning("Warning! Danger! Warning!")
+ debug("Debug! Danger! Debug!")
+ println("I computed a value: " + computedValue)
+ }
+ % scalac example.scala && scala Test
+ Warning! Danger! Warning!
+ Debug! Danger! Debug!
+ I computed a value: 172
+
+ // INFO lies between WARNING and FINE
+ % scalac -Xelide-below INFO example.scala && scala Test
+ Warning! Danger! Warning!
+ I computed a value: 0
+ }}}
+ *
+ * @author Paul Phillips
+ * @since 2.8
*/
final class elidable(final val level: Int) extends annotation.StaticAnnotation {}
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index c837775cf9..1dcc0bdac7 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -318,7 +318,7 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* $orderDependent
*
* @param from the lowest index to include from this $coll.
- * @param until the highest index to EXCLUDE from this $coll.
+ * @param until the lowest index to EXCLUDE from this $coll.
* @return a $coll containing the elements greater than or equal to
* index `from` extending up to (but not including) index `until`
* of this $coll.
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index d5011fc6aa..50919e506a 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -69,7 +69,7 @@ object JavaConversions {
* @return A Java Iterator view of the argument.
*/
implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
- case JIteratorWrapper(wrapped) => wrapped
+ case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
case _ => IteratorWrapper(it)
}
@@ -87,7 +87,7 @@ object JavaConversions {
* @return A Java Enumeration view of the argument.
*/
implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
- case JEnumerationWrapper(wrapped) => wrapped
+ case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
case _ => IteratorWrapper(it)
}
@@ -105,7 +105,7 @@ object JavaConversions {
* @return A Java Iterable view of the argument.
*/
implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
- case JIterableWrapper(wrapped) => wrapped
+ case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
case _ => IterableWrapper(i)
}
@@ -121,7 +121,7 @@ object JavaConversions {
* @return A Java Collection view of the argument.
*/
implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
- case JCollectionWrapper(wrapped) => wrapped
+ case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
case _ => new IterableWrapper(it)
}
@@ -179,7 +179,7 @@ object JavaConversions {
* @return A Java List view of the argument.
*/
implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped
+ case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
case _ => new SeqWrapper(seq)
}
@@ -286,7 +286,7 @@ object JavaConversions {
*/
implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
//case JConcurrentMapWrapper(wrapped) => wrapped
- case JMapWrapper(wrapped) => wrapped
+ case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
case _ => new MapWrapper(m)
}
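The asInstanceOf unwrapping above keeps wrap/unwrap round trips identity-preserving; a sketch:

    import scala.collection.JavaConversions._

    val jlist = new java.util.ArrayList[String]()
    jlist.add("a")
    val sseq: Seq[String] = jlist            // implicitly wrapped as JListWrapper
    val back: java.util.List[String] = sseq  // unwrapped: the original list
    assert(back eq jlist)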
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 02298ef096..b51a37cf9e 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -151,7 +151,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
def next(): Repr = {
if (!hasNext)
Iterator.empty.next
-
+
val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
val result = (self.newBuilder ++= forcedElms).result
var i = idxs.length - 2
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
index b235379575..cbbedc0231 100644
--- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -11,12 +11,12 @@ package generic
import scala.collection.mutable.{ Builder, GrowingBuilder }
-/**
+/**
* @define Coll mutable.SortedSet
* @define coll mutable sorted
*
* @author Lucien Pereira
- *
+ *
*/
abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with SortedSetLike[A, CC[A]] with mutable.Set[A] with mutable.SetLike[A, CC[A]]] extends SortedSetFactory[CC] {
@@ -26,7 +26,7 @@ abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with Sorted
* is evaluated elems is cloned (which is O(n)).
*
* Fortunately GrowingBuilder comes to rescue.
- *
+ *
*/
override def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty)
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index abccd91f9c..870d5534dc 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -131,7 +131,7 @@ object BitSet extends BitSetFactory[BitSet] {
* the capacity of two long values). The constructor wraps an existing
* bit mask without copying, thus exposing a mutable part of the internal
* implementation. Care needs to be taken not to modify the exposed
- * array.
+ * array.
*/
class BitSetN(val elems: Array[Long]) extends BitSet {
protected def nwords = elems.length
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 9cde20f1df..6b11371bec 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -138,8 +138,10 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[B1]): HashMap[A, B1] =
if (hash == this.hash && key == this.key ) {
- if (merger eq null) new HashMap1(key, hash, value, kv)
- else new HashMap1(key, hash, value, merger(this.kv, kv))
+ if (merger eq null) {
+ if(this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this
+ else new HashMap1(key, hash, value, kv)
+ } else new HashMap1(key, hash, value, merger(this.kv, kv))
} else {
var thatindex = (hash >>> level) & 0x1f
var thisindex = (this.hash >>> level) & 0x1f
@@ -271,13 +273,15 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
if ((bitmap & mask) != 0) {
- val elemsNew = new Array[HashMap[A,B1]](elems.length)
- Array.copy(elems, 0, elemsNew, 0, elems.length)
val sub = elems(offset)
// TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
val subNew = sub.updated0(key, hash, level + 5, value, kv, merger)
- elemsNew(offset) = subNew
- new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size))
+ if(subNew eq sub) this else {
+ val elemsNew = new Array[HashMap[A,B1]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ elemsNew(offset) = subNew
+ new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size))
+ }
} else {
val elemsNew = new Array[HashMap[A,B1]](elems.length + 1)
Array.copy(elems, 0, elemsNew, 0, offset)
@@ -295,7 +299,8 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
val sub = elems(offset)
// TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
val subNew = sub.removed0(key, hash, level + 5)
- if (subNew.isEmpty) {
+ if (subNew eq sub) this
+ else if (subNew.isEmpty) {
val bitmapNew = bitmap ^ mask
if (bitmapNew != 0) {
val elemsNew = new Array[HashMap[A,B]](elems.length - 1)
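The `eq` checks above make updates that change nothing return `this`, so no fresh trie nodes are allocated. A sketch of the observable effect, relying on string literal interning for the reference equality:

    import scala.collection.immutable.HashMap

    val m  = HashMap(1 -> "a", 2 -> "b")
    val m2 = m.updated(1, "a")   // new value is reference-equal to the stored one
    assert(m2 eq m)              // the whole map is reused
    assert((m - 99) eq m)        // removing an absent key is likewise a no-op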
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index dd6b066878..3c9c0c2f24 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -353,19 +353,19 @@ extends AbstractMap[Int, T]
def unionWith[S >: T](that : IntMap[S], f : (Int, S, S) => S) : IntMap[S] = (this, that) match{
case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that);
+ if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1);
else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f));
} else if (shorter(m2, m1)){
- if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that);
+ if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2);
else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f));
}
else {
if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f));
- else join(p1, this, p2, that);
+ else join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
}
- case (IntMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x));
+ case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x));
case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y));
case (IntMap.Nil, x) => x;
case (x, IntMap.Nil) => x;
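The added `[S]` annotations only pin the inferred type argument (the SI-5548 workaround); `unionWith` itself behaves as before. A small usage sketch:

    import scala.collection.immutable.IntMap

    val a = IntMap(1 -> 10, 2 -> 20)
    val b = IntMap(2 -> 2, 3 -> 3)
    // the callback receives (key, value from this, value from that)
    a.unionWith(b, (key, x, y) => x + y)   // IntMap(1 -> 10, 2 -> 22, 3 -> 3)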
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 5f3f9b717f..381fcf3117 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -205,6 +205,16 @@ sealed abstract class List[+A] extends AbstractSeq[A]
these
}
+ /**
+ * @example {{{
+ * // Given a list
+ * val letters = List('a','b','c','d','e')
+ *
+ * // `slice` returns all elements from index `from` (inclusive)
+ * // up to index `until` (exclusive).
+ * letters.slice(1,3) // Returns List('b','c')
+ * }}}
+ */
override def slice(from: Int, until: Int): List[A] = {
val lo = math.max(from, 0)
if (until <= lo || isEmpty) Nil
@@ -316,13 +326,13 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
override def head : B = hd
override def tail : List[B] = tl
override def isEmpty: Boolean = false
-
+
private def writeObject(out: ObjectOutputStream) {
out.writeObject(ListSerializeStart) // needed to differentiate with the legacy `::` serialization
out.writeObject(this.hd)
out.writeObject(this.tl)
}
-
+
private def readObject(in: ObjectInputStream) {
val obj = in.readObject()
if (obj == ListSerializeStart) {
@@ -330,7 +340,7 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
this.tl = in.readObject().asInstanceOf[List[B]]
} else oldReadObject(in, obj)
}
-
+
/* The oldReadObject method exists here for compatibility reasons.
* :: objects used to be serialized by serializing all the elements to
* the output stream directly, but this was broken (see SI-5374).
@@ -349,13 +359,13 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
current = list
}
}
-
+
private def oldWriteObject(out: ObjectOutputStream) {
var xs: List[B] = this
while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
out.writeObject(ListSerializeEnd)
}
-
+
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 963ddac762..11b5d1e311 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -349,19 +349,19 @@ extends AbstractMap[Long, T]
def unionWith[S >: T](that : LongMap[S], f : (Long, S, S) => S) : LongMap[S] = (this, that) match{
case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that);
+ if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1);
else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f));
} else if (shorter(m2, m1)){
- if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that);
+ if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2);
else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f));
}
else {
if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f));
- else join(p1, this, p2, that);
+ else join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
}
- case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x));
+ case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)); // TODO: remove [S] when SI-5548 is fixed
case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y));
case (LongMap.Nil, x) => x;
case (x, LongMap.Nil) => x;
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 7537558f0b..b72d83f896 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -77,9 +77,9 @@ extends collection.AbstractSeq[Int]
}
final val lastElement = start + (numRangeElements - 1) * step
final val terminalElement = start + numRangeElements * step
-
+
override def last = if (isEmpty) Nil.last else lastElement
-
+
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
/** Create a new range with the `start` and `end` values of this range and
@@ -93,14 +93,14 @@ extends collection.AbstractSeq[Int]
override def size = length
override def length = if (numRangeElements < 0) fail() else numRangeElements
-
+
private def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step)
private def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.")
private def validateMaxLength() {
if (numRangeElements < 0)
fail()
}
-
+
def validateRangeBoundaries(f: Int => Any): Boolean = {
validateMaxLength()
@@ -121,7 +121,7 @@ extends collection.AbstractSeq[Int]
if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString)
else start + (step * idx)
}
-
+
@inline final override def foreach[@specialized(Unit) U](f: Int => U) {
if (validateRangeBoundaries(f)) {
var i = start
@@ -309,7 +309,7 @@ object Range {
// number of full-sized jumps.
val hasStub = isInclusive || (gap % step != 0)
val result: Long = jumps + ( if (hasStub) 1 else 0 )
-
+
if (result > scala.Int.MaxValue) -1
else result.toInt
}
@@ -405,4 +405,3 @@ object Range {
// super.foreach(f)
}
}
- \ No newline at end of file
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index 9906c9896e..83eeaa45ee 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -11,10 +11,13 @@
package scala.collection
package immutable
-/** A base class containing the implementations for `TreeMaps` and `TreeSets`.
+/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
+ *
+ * Deprecated due to various performance bugs (see [[https://issues.scala-lang.org/browse/SI-5331 SI-5331]] for more information).
*
* @since 2.3
*/
+@deprecated("use `TreeMap` or `TreeSet` instead", "2.10")
@SerialVersionUID(8691885935445612921L)
abstract class RedBlack[A] extends Serializable {
@@ -287,5 +290,3 @@ abstract class RedBlack[A] extends Serializable {
def isBlack = true
}
}
-
-
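With the @deprecated annotation, any remaining subclass of the old implementation now triggers a compile-time warning. A sketch, assuming a hypothetical leftover user of the class:

    // hypothetical leftover code
    class MyTree extends scala.collection.immutable.RedBlack[Int] {
      def isSmaller(x: Int, y: Int) = x < y
    }
    // warning: class RedBlack in package immutable is deprecated:
    //          use `TreeMap` or `TreeSet` instead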
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
new file mode 100644
index 0000000000..0f28c4997b
--- /dev/null
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -0,0 +1,485 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package immutable
+
+import annotation.tailrec
+import annotation.meta.getter
+
+/** An object containing the RedBlack tree implementation used by `TreeMap` and `TreeSet`.
+ *
+ * Implementation note: since efficiency is important for data structures this implementation
+ * uses <code>null</code> to represent empty trees. This also means pattern matching cannot
+ * easily be used. The RedBlackTree object tries to hide these optimizations
+ * behind a reasonably clean API.
+ *
+ * @since 2.10
+ */
+private[immutable]
+object RedBlackTree {
+
+ def isEmpty(tree: Tree[_, _]): Boolean = tree eq null
+
+ def contains[A](tree: Tree[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null
+ def get[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match {
+ case null => None
+ case tree => Some(tree.value)
+ }
+
+ @tailrec
+ def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+ val cmp = ordering.compare(x, tree.key)
+ if (cmp < 0) lookup(tree.left, x)
+ else if (cmp > 0) lookup(tree.right, x)
+ else tree
+ }
+
+ def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
+ def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v))
+ def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k))
+ def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match {
+ case (Some(from), Some(until)) => this.range(tree, from, until)
+ case (Some(from), None) => this.from(tree, from)
+ case (None, Some(until)) => this.until(tree, until)
+ case (None, None) => tree
+ }
+ def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until))
+ def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from))
+ def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to))
+ def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key))
+
+ def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n))
+ def take[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n))
+ def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until))
+
+ def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = {
+ if (tree eq null) throw new NoSuchElementException("empty map")
+ var result = tree
+ while (result.left ne null) result = result.left
+ result
+ }
+ def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = {
+ if (tree eq null) throw new NoSuchElementException("empty map")
+ var result = tree
+ while (result.right ne null) result = result.right
+ result
+ }
+
+ def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) {
+ if (tree.left ne null) foreach(tree.left, f)
+ f((tree.key, tree.value))
+ if (tree.right ne null) foreach(tree.right, f)
+ }
+ def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) {
+ if (tree.left ne null) foreachKey(tree.left, f)
+ f(tree.key)
+ if (tree.right ne null) foreachKey(tree.right, f)
+ }
+
+ def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree)
+ def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree)
+ def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree)
+
+ @tailrec
+ def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ val count = this.count(tree.left)
+ if (n < count) nth(tree.left, n)
+ else if (n > count) nth(tree.right, n - count - 1)
+ else tree
+ }
+
+ def isBlack(tree: Tree[_, _]) = (tree eq null) || isBlackTree(tree)
+
+ private[this] def isRedTree(tree: Tree[_, _]) = tree.isInstanceOf[RedTree[_, _]]
+ private[this] def isBlackTree(tree: Tree[_, _]) = tree.isInstanceOf[BlackTree[_, _]]
+
+ private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black
+
+ private[this] def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) =
+ if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r)
+
+ private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1]): Tree[A, B1] = {
+ if (isRedTree(l) && isRedTree(l.left))
+ RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d))
+ else if (isRedTree(l) && isRedTree(l.right))
+ RedTree(l.right.key, l.right.value, BlackTree(l.key, l.value, l.left, l.right.left), BlackTree(z, zv, l.right.right, d))
+ else
+ mkTree(isBlack, z, zv, l, d)
+ }
+ private[this] def balanceRight[A, B, B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1]): Tree[A, B1] = {
+ if (isRedTree(r) && isRedTree(r.left))
+ RedTree(r.left.key, r.left.value, BlackTree(x, xv, a, r.left.left), BlackTree(r.key, r.value, r.left.right, r.right))
+ else if (isRedTree(r) && isRedTree(r.right))
+ RedTree(r.key, r.value, BlackTree(x, xv, a, r.left), BlackTree(r.right.key, r.right.value, r.right.left, r.right.right))
+ else
+ mkTree(isBlack, x, xv, a, r)
+ }
+ private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) {
+ RedTree(k, v, null, null)
+ } else {
+ val cmp = ordering.compare(k, tree.key)
+ if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v), tree.right)
+ else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v))
+ else mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ }
+
+ // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
+ // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html
+ private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+ def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) {
+ if (isRedTree(tr)) {
+ RedTree(x, xv, tl.black, tr.black)
+ } else if (isRedTree(tl.left)) {
+ RedTree(tl.key, tl.value, tl.left.black, BlackTree(x, xv, tl.right, tr))
+ } else if (isRedTree(tl.right)) {
+ RedTree(tl.right.key, tl.right.value, BlackTree(tl.key, tl.value, tl.left, tl.right.left), BlackTree(x, xv, tl.right.right, tr))
+ } else {
+ BlackTree(x, xv, tl, tr)
+ }
+ } else if (isRedTree(tr)) {
+ if (isRedTree(tr.right)) {
+ RedTree(tr.key, tr.value, BlackTree(x, xv, tl, tr.left), tr.right.black)
+ } else if (isRedTree(tr.left)) {
+ RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), BlackTree(tr.key, tr.value, tr.left.right, tr.right))
+ } else {
+ BlackTree(x, xv, tl, tr)
+ }
+ } else {
+ BlackTree(x, xv, tl, tr)
+ }
+ def subl(t: Tree[A, B]) =
+ if (t.isInstanceOf[BlackTree[_, _]]) t.red
+ else sys.error("Defect: invariance violation; expected black, got "+t)
+
+ def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) {
+ RedTree(x, xv, tl.black, tr)
+ } else if (isBlackTree(tr)) {
+ balance(x, xv, tl, tr.red)
+ } else if (isRedTree(tr) && isBlackTree(tr.left)) {
+ RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right)))
+ } else {
+ sys.error("Defect: invariance violation")
+ }
+ def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tr)) {
+ RedTree(x, xv, tl, tr.black)
+ } else if (isBlackTree(tl)) {
+ balance(x, xv, tl.red, tr)
+ } else if (isRedTree(tl) && isBlackTree(tl.right)) {
+ RedTree(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackTree(x, xv, tl.right.right, tr))
+ } else {
+ sys.error("Defect: invariance violation")
+ }
+ def delLeft = if (isBlackTree(tree.left)) balLeft(tree.key, tree.value, del(tree.left, k), tree.right) else RedTree(tree.key, tree.value, del(tree.left, k), tree.right)
+ def delRight = if (isBlackTree(tree.right)) balRight(tree.key, tree.value, tree.left, del(tree.right, k)) else RedTree(tree.key, tree.value, tree.left, del(tree.right, k))
+ def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = if (tl eq null) {
+ tr
+ } else if (tr eq null) {
+ tl
+ } else if (isRedTree(tl) && isRedTree(tr)) {
+ val bc = append(tl.right, tr.left)
+ if (isRedTree(bc)) {
+ RedTree(bc.key, bc.value, RedTree(tl.key, tl.value, tl.left, bc.left), RedTree(tr.key, tr.value, bc.right, tr.right))
+ } else {
+ RedTree(tl.key, tl.value, tl.left, RedTree(tr.key, tr.value, bc, tr.right))
+ }
+ } else if (isBlackTree(tl) && isBlackTree(tr)) {
+ val bc = append(tl.right, tr.left)
+ if (isRedTree(bc)) {
+ RedTree(bc.key, bc.value, BlackTree(tl.key, tl.value, tl.left, bc.left), BlackTree(tr.key, tr.value, bc.right, tr.right))
+ } else {
+ balLeft(tl.key, tl.value, tl.left, BlackTree(tr.key, tr.value, bc, tr.right))
+ }
+ } else if (isRedTree(tr)) {
+ RedTree(tr.key, tr.value, append(tl, tr.left), tr.right)
+ } else if (isRedTree(tl)) {
+ RedTree(tl.key, tl.value, tl.left, append(tl.right, tr))
+ } else {
+ sys.error("unmatched tree on append: " + tl + ", " + tr)
+ }
+
+ val cmp = ordering.compare(k, tree.key)
+ if (cmp < 0) delLeft
+ else if (cmp > 0) delRight
+ else append(tree.left, tree.right)
+ }
+
+ private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
+ val newLeft = doFrom(tree.left, from)
+ if (newLeft eq tree.left) tree
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value)
+ else rebalance(tree, newLeft, tree.right)
+ }
+ private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
+ val newRight = doTo(tree.right, to)
+ if (newRight eq tree.right) tree
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else rebalance(tree, tree.left, newRight)
+ }
+ private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
+ val newRight = doUntil(tree.right, until)
+ if (newRight eq tree.right) tree
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else rebalance(tree, tree.left, newRight)
+ }
+ private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until);
+ if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until);
+ val newLeft = doFrom(tree.left, from)
+ val newRight = doUntil(tree.right, until)
+ if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value);
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value);
+ else rebalance(tree, newLeft, newRight)
+ }
+
+ private[this] def doDrop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ if (n <= 0) return tree
+ if (n >= this.count(tree)) return null
+ val count = this.count(tree.left)
+ if (n > count) return doDrop(tree.right, n - count - 1)
+ val newLeft = doDrop(tree.left, n)
+ if (newLeft eq tree.left) tree
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value)
+ else rebalance(tree, newLeft, tree.right)
+ }
+ private[this] def doTake[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ if (n <= 0) return null
+ if (n >= this.count(tree)) return tree
+ val count = this.count(tree.left)
+ if (n <= count) return doTake(tree.left, n)
+ val newRight = doTake(tree.right, n - count - 1)
+ if (newRight eq tree.right) tree
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else rebalance(tree, tree.left, newRight)
+ }
+ private[this] def doSlice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
+ if (tree eq null) return null
+ val count = this.count(tree.left)
+ if (from > count) return doSlice(tree.right, from - count - 1, until - count - 1)
+ if (until <= count) return doSlice(tree.left, from, until)
+ val newLeft = doDrop(tree.left, from)
+ val newRight = doTake(tree.right, until - count - 1)
+ if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value)
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value)
+ else rebalance(tree, newLeft, newRight)
+ }
+
+ // The zipper returned might have been traversed left-most (always the left child)
+ // or right-most (always the right child). Left trees are traversed right-most,
+  // and right trees are traversed left-most.
+
+ // Returns the zipper for the side with deepest black nodes depth, a flag
+ // indicating whether the trees were unbalanced at all, and a flag indicating
+ // whether the zipper was traversed left-most or right-most.
+
+ // If the trees were balanced, returns an empty zipper
+ private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (List[Tree[A, B]], Boolean, Boolean, Int) = {
+ // Once a side is found to be deeper, unzip it to the bottom
+ def unzip(zipper: List[Tree[A, B]], leftMost: Boolean): List[Tree[A, B]] = {
+ val next = if (leftMost) zipper.head.left else zipper.head.right
+ next match {
+ case null => zipper
+ case node => unzip(node :: zipper, leftMost)
+ }
+ }
+
+ // Unzip left tree on the rightmost side and right tree on the leftmost side until one is
+ // found to be deeper, or the bottom is reached
+ def unzipBoth(left: Tree[A, B],
+ right: Tree[A, B],
+ leftZipper: List[Tree[A, B]],
+ rightZipper: List[Tree[A, B]],
+ smallerDepth: Int): (List[Tree[A, B]], Boolean, Boolean, Int) = {
+ if (isBlackTree(left) && isBlackTree(right)) {
+ unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth + 1)
+ } else if (isRedTree(left) && isRedTree(right)) {
+ unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth)
+ } else if (isRedTree(right)) {
+ unzipBoth(left, right.left, leftZipper, right :: rightZipper, smallerDepth)
+ } else if (isRedTree(left)) {
+ unzipBoth(left.right, right, left :: leftZipper, rightZipper, smallerDepth)
+ } else if ((left eq null) && (right eq null)) {
+ (Nil, true, false, smallerDepth)
+ } else if ((left eq null) && isBlackTree(right)) {
+ val leftMost = true
+ (unzip(right :: rightZipper, leftMost), false, leftMost, smallerDepth)
+ } else if (isBlackTree(left) && (right eq null)) {
+ val leftMost = false
+ (unzip(left :: leftZipper, leftMost), false, leftMost, smallerDepth)
+ } else {
+ sys.error("unmatched trees in unzip: " + left + ", " + right)
+ }
+ }
+ unzipBoth(left, right, Nil, Nil, 0)
+ }
+
+ private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = {
+ // This is like drop(n-1), but only counting black nodes
+ def findDepth(zipper: List[Tree[A, B]], depth: Int): List[Tree[A, B]] = zipper match {
+ case head :: tail if isBlackTree(head) =>
+ if (depth == 1) zipper else findDepth(tail, depth - 1)
+ case _ :: tail => findDepth(tail, depth)
+ case Nil => sys.error("Defect: unexpected empty zipper while computing range")
+ }
+
+ // Blackening the smaller tree avoids balancing problems on union;
+ // this can't be done later, though, or it would change the result of compareDepth
+ val blkNewLeft = blacken(newLeft)
+ val blkNewRight = blacken(newRight)
+ val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight)
+
+ if (levelled) {
+ BlackTree(tree.key, tree.value, blkNewLeft, blkNewRight)
+ } else {
+ val zipFrom = findDepth(zipper, smallerDepth)
+ val union = if (leftMost) {
+ RedTree(tree.key, tree.value, blkNewLeft, zipFrom.head)
+ } else {
+ RedTree(tree.key, tree.value, zipFrom.head, blkNewRight)
+ }
+ val zippedTree = zipFrom.tail.foldLeft(union: Tree[A, B]) { (tree, node) =>
+ if (leftMost)
+ balanceLeft(isBlackTree(node), node.key, node.value, tree, node.right)
+ else
+ balanceRight(isBlackTree(node), node.key, node.value, node.left, tree)
+ }
+ zippedTree
+ }
+ }
+
+ /*
+   * Forcing direct field access using the @inline annotation helps speed up
+ * various operations (especially smallest/greatest and update/delete).
+ *
+ * Unfortunately the direct field access is not guaranteed to work (but
+ * works on the current implementation of the Scala compiler).
+ *
+   * An alternative is to implement these classes using plain old Java code...
+ */
+ sealed abstract class Tree[A, +B](
+ @(inline @getter) final val key: A,
+ @(inline @getter) final val value: B,
+ @(inline @getter) final val left: Tree[A, B],
+ @(inline @getter) final val right: Tree[A, B])
+ extends Serializable {
+ final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right)
+ def black: Tree[A, B]
+ def red: Tree[A, B]
+ }
+ final class RedTree[A, +B](key: A,
+ value: B,
+ left: Tree[A, B],
+ right: Tree[A, B]) extends Tree[A, B](key, value, left, right) {
+ override def black: Tree[A, B] = BlackTree(key, value, left, right)
+ override def red: Tree[A, B] = this
+ override def toString: String = "RedTree(" + key + ", " + value + ", " + left + ", " + right + ")"
+ }
+ final class BlackTree[A, +B](key: A,
+ value: B,
+ left: Tree[A, B],
+ right: Tree[A, B]) extends Tree[A, B](key, value, left, right) {
+ override def black: Tree[A, B] = this
+ override def red: Tree[A, B] = RedTree(key, value, left, right)
+ override def toString: String = "BlackTree(" + key + ", " + value + ", " + left + ", " + right + ")"
+ }
+
+ object RedTree {
+ @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new RedTree(key, value, left, right)
+ def unapply[A, B](t: RedTree[A, B]) = Some((t.key, t.value, t.left, t.right))
+ }
+ object BlackTree {
+ @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new BlackTree(key, value, left, right)
+ def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right))
+ }
+
+ private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B]) extends Iterator[R] {
+ protected[this] def nextResult(tree: Tree[A, B]): R
+
+ override def hasNext: Boolean = next ne null
+
+ override def next: R = next match {
+ case null =>
+ throw new NoSuchElementException("next on empty iterator")
+ case tree =>
+ next = findNext(tree.right)
+ nextResult(tree)
+ }
+
+ @tailrec
+ private[this] def findNext(tree: Tree[A, B]): Tree[A, B] = {
+ if (tree eq null) popPath()
+ else if (tree.left eq null) tree
+ else {
+ pushPath(tree)
+ findNext(tree.left)
+ }
+ }
+
+ private[this] def pushPath(tree: Tree[A, B]) {
+ try {
+ path(index) = tree
+ index += 1
+ } catch {
+ case _: ArrayIndexOutOfBoundsException =>
+ /*
+ * Either the tree became unbalanced or we calculated the maximum height incorrectly.
+ * To avoid crashing the iterator we expand the path array. Obviously this should never
+ * happen...
+ *
+ * An exception handler is used instead of an if-condition to optimize the normal path.
+ * This makes a large difference in iteration speed!
+ */
+ assert(index >= path.length)
+ path :+= null
+ pushPath(tree)
+ }
+ }
+ private[this] def popPath(): Tree[A, B] = if (index == 0) null else {
+ index -= 1
+ path(index)
+ }
+
+ private[this] var path = if (tree eq null) null else {
+ /*
+ * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
+ * the maximum height of a red-black tree is 2*log_2(n + 2) - 2.
+ *
+ * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1))
+ *
+ * We also don't store the deepest nodes in the path so the maximum path length is further reduced by one.
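+     *
+     * Worked example: for a tree with count = 1000000, 32 - numberOfLeadingZeros(1000001)
+     * is 20 = ceil(log_2(1000001)), so maximumHeight = 2 * 20 - 2 - 1 = 37.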
+ */
+ val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1
+ new Array[Tree[A, B]](maximumHeight)
+ }
+ private[this] var index = 0
+ private[this] var next: Tree[A, B] = findNext(tree)
+ }
+
+ private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) {
+ override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value)
+ }
+
+ private[this] class KeysIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, A](tree) {
+ override def nextResult(tree: Tree[A, B]) = tree.key
+ }
+
+ private[this] class ValuesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, B](tree) {
+ override def nextResult(tree: Tree[A, B]) = tree.value
+ }
+}
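Taken together, a minimal sketch of the API this object exposes (it is private[immutable], so only code in that package, such as the TreeMap and TreeSet rewrites below, can call it):

    import scala.collection.immutable.RedBlackTree._

    var t: Tree[Int, String] = null    // null represents the empty tree
    t = update(t, 1, "one")
    t = update(t, 2, "two")
    count(t)                           // 2
    get(t, 1)                          // Some("one")
    iterator(t).toList                 // List((1, "one"), (2, "two"))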
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index ef0eac3701..dc4f79be35 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -12,6 +12,7 @@ package scala.collection
package immutable
import generic._
+import immutable.{RedBlackTree => RB}
import mutable.Builder
import annotation.bridge
@@ -23,7 +24,6 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] {
def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord)
/** $sortedMapCanBuildFromInfo */
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = new SortedMapCanBuildFrom[A, B]
- private def make[A, B](s: Int, t: RedBlack[A]#Tree[B])(implicit ord: Ordering[A]) = new TreeMap[A, B](s, t)(ord)
}
/** This class implements immutable maps using a tree.
@@ -46,31 +46,79 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] {
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit val ordering: Ordering[A])
- extends RedBlack[A]
- with SortedMap[A, B]
+class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A])
+ extends SortedMap[A, B]
with SortedMapLike[A, B, TreeMap[A, B]]
with MapLike[A, B, TreeMap[A, B]]
with Serializable {
+ @deprecated("use `ordering.lt` instead", "2.10")
def isSmaller(x: A, y: A) = ordering.lt(x, y)
override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] =
TreeMap.newBuilder[A, B]
- def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering)
+ override def size = RB.count(tree)
- protected val tree: RedBlack[A]#Tree[B] = if (size == 0) Empty else t
+ def this()(implicit ordering: Ordering[A]) = this(null)(ordering)
- override def rangeImpl(from : Option[A], until : Option[A]): TreeMap[A,B] = {
- val ntree = tree.range(from,until)
- new TreeMap[A,B](ntree.count, ntree)
- }
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = new TreeMap[A, B](RB.rangeImpl(tree, from, until))
+ override def range(from: A, until: A): TreeMap[A, B] = new TreeMap[A, B](RB.range(tree, from, until))
+ override def from(from: A): TreeMap[A, B] = new TreeMap[A, B](RB.from(tree, from))
+ override def to(to: A): TreeMap[A, B] = new TreeMap[A, B](RB.to(tree, to))
+ override def until(until: A): TreeMap[A, B] = new TreeMap[A, B](RB.until(tree, until))
- override def firstKey = t.first
- override def lastKey = t.last
+ override def firstKey = RB.smallest(tree).key
+ override def lastKey = RB.greatest(tree).key
override def compare(k0: A, k1: A): Int = ordering.compare(k0, k1)
+ override def head = {
+ val smallest = RB.smallest(tree)
+ (smallest.key, smallest.value)
+ }
+ override def headOption = if (RB.isEmpty(tree)) None else Some(head)
+ override def last = {
+ val greatest = RB.greatest(tree)
+ (greatest.key, greatest.value)
+ }
+ override def lastOption = if (RB.isEmpty(tree)) None else Some(last)
+
+ override def tail = new TreeMap(RB.delete(tree, firstKey))
+ override def init = new TreeMap(RB.delete(tree, lastKey))
+
+ override def drop(n: Int) = {
+ if (n <= 0) this
+ else if (n >= size) empty
+ else new TreeMap(RB.drop(tree, n))
+ }
+
+ override def take(n: Int) = {
+ if (n <= 0) empty
+ else if (n >= size) this
+ else new TreeMap(RB.take(tree, n))
+ }
+
+ override def slice(from: Int, until: Int) = {
+ if (until <= from) empty
+ else if (from <= 0) take(until)
+ else if (until >= size) drop(from)
+ else new TreeMap(RB.slice(tree, from, until))
+ }
+
+ override def dropRight(n: Int) = take(size - n)
+ override def takeRight(n: Int) = drop(size - n)
+ override def splitAt(n: Int) = (take(n), drop(n))
+
+ private[this] def countWhile(p: ((A, B)) => Boolean): Int = {
+ var result = 0
+ val it = iterator
+ while (it.hasNext && p(it.next)) result += 1
+ result
+ }
+ override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p))
+ override def takeWhile(p: ((A, B)) => Boolean) = take(countWhile(p))
+ override def span(p: ((A, B)) => Boolean) = splitAt(countWhile(p))
+
/** A factory to create empty maps of the same type of keys.
*/
override def empty: TreeMap[A, B] = TreeMap.empty[A, B](ordering)
@@ -84,10 +132,7 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
* @param value the value to be associated with `key`
* @return a new $coll with the updated binding
*/
- override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
- val newsize = if (tree.lookup(key).isEmpty) size + 1 else size
- TreeMap.make(newsize, tree.update(key, value))
- }
+ override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value))
/** Add a key/value pair to this map.
* @tparam B1 type of the value of the new binding, a supertype of `B`
@@ -128,14 +173,13 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
* @return a new $coll with the inserted binding, if it wasn't present in the map
*/
def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
- assert(tree.lookup(key).isEmpty)
- TreeMap.make(size + 1, tree.update(key, value))
+ assert(!RB.contains(tree, key))
+ new TreeMap(RB.update(tree, key, value))
}
def - (key:A): TreeMap[A, B] =
- if (tree.lookup(key).isEmpty) this
- else if (size == 1) empty
- else TreeMap.make(size - 1, tree.delete(key))
+ if (!RB.contains(tree, key)) this
+ else new TreeMap(RB.delete(tree, key))
/** Check if this map maps `key` to a value and return the
* value if it exists.
@@ -143,21 +187,22 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
* @param key the key of the mapping of interest
* @return the value of the mapping, if it exists
*/
- override def get(key: A): Option[B] = tree.lookup(key) match {
- case n: NonEmpty[b] => Some(n.value)
- case _ => None
- }
+ override def get(key: A): Option[B] = RB.get(tree, key)
/** Creates a new iterator over all elements contained in this
* object.
*
* @return the new iterator
*/
- def iterator: Iterator[(A, B)] = tree.toStream.iterator
+ override def iterator: Iterator[(A, B)] = RB.iterator(tree)
+
+ override def keysIterator: Iterator[A] = RB.keysIterator(tree)
+ override def valuesIterator: Iterator[B] = RB.valuesIterator(tree)
- override def toStream: Stream[(A, B)] = tree.toStream
+ override def contains(key: A): Boolean = RB.contains(tree, key)
+ override def isDefinedAt(key: A): Boolean = RB.contains(tree, key)
- override def foreach[U](f : ((A,B)) => U) = tree foreach { case (x, y) => f(x, y) }
+ override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f)
}
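The overrides above let TreeMap slice on the tree structure instead of rebuilding element by element. A short sketch of the affected operations:

    import scala.collection.immutable.TreeMap

    val tm = TreeMap(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d")
    tm.take(2)          // TreeMap(1 -> "a", 2 -> "b")
    tm.drop(3)          // TreeMap(4 -> "d")
    tm.span(_._1 < 3)   // (TreeMap(1 -> "a", 2 -> "b"), TreeMap(3 -> "c", 4 -> "d"))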
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 8b90ece143..1b3d72ceb7 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -12,6 +12,7 @@ package scala.collection
package immutable
import generic._
+import immutable.{RedBlackTree => RB}
import mutable.{ Builder, SetBuilder }
/** $factoryInfo
@@ -46,20 +47,61 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] {
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@SerialVersionUID(-234066569443569402L)
-class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
- (implicit val ordering: Ordering[A])
- extends RedBlack[A] with SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable {
+@SerialVersionUID(-5685982407650748405L)
+class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Ordering[A])
+ extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable {
override def stringPrefix = "TreeSet"
- def isSmaller(x: A, y: A) = compare(x,y) < 0
+ override def size = RB.count(tree)
+
+ override def head = RB.smallest(tree).key
+ override def headOption = if (RB.isEmpty(tree)) None else Some(head)
+ override def last = RB.greatest(tree).key
+ override def lastOption = if (RB.isEmpty(tree)) None else Some(last)
+
+ override def tail = new TreeSet(RB.delete(tree, firstKey))
+ override def init = new TreeSet(RB.delete(tree, lastKey))
+
+ override def drop(n: Int) = {
+ if (n <= 0) this
+ else if (n >= size) empty
+ else newSet(RB.drop(tree, n))
+ }
- def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering)
+ override def take(n: Int) = {
+ if (n <= 0) empty
+ else if (n >= size) this
+ else newSet(RB.take(tree, n))
+ }
- protected val tree: RedBlack[A]#Tree[Unit] = if (size == 0) Empty else t
+ override def slice(from: Int, until: Int) = {
+ if (until <= from) empty
+ else if (from <= 0) take(until)
+ else if (until >= size) drop(from)
+ else newSet(RB.slice(tree, from, until))
+ }
- private def newSet(s: Int, t: RedBlack[A]#Tree[Unit]) = new TreeSet[A](s, t)
+ override def dropRight(n: Int) = take(size - n)
+ override def takeRight(n: Int) = drop(size - n)
+ override def splitAt(n: Int) = (take(n), drop(n))
+
+ private[this] def countWhile(p: A => Boolean): Int = {
+ var result = 0
+ val it = iterator
+ while (it.hasNext && p(it.next)) result += 1
+ result
+ }
+ override def dropWhile(p: A => Boolean) = drop(countWhile(p))
+ override def takeWhile(p: A => Boolean) = take(countWhile(p))
+ override def span(p: A => Boolean) = splitAt(countWhile(p))
+
+ @deprecated("use `ordering.lt` instead", "2.10")
+ def isSmaller(x: A, y: A) = compare(x,y) < 0
+
+ def this()(implicit ordering: Ordering[A]) = this(null)(ordering)
+
+ private def newSet(t: RB.Tree[A, Unit]) = new TreeSet[A](t)
/** A factory to create empty sets of the same type of keys.
*/
@@ -70,10 +112,7 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
* @param elem a new element to add.
* @return a new $coll containing `elem` and all the elements of this $coll.
*/
- def + (elem: A): TreeSet[A] = {
- val newsize = if (tree.lookup(elem).isEmpty) size + 1 else size
- newSet(newsize, tree.update(elem, ()))
- }
+ def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, ()))
/** A new `TreeSet` with the entry added is returned,
* assuming that elem is <em>not</em> in the TreeSet.
@@ -82,8 +121,8 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
* @return a new $coll containing `elem` and all the elements of this $coll.
*/
def insert(elem: A): TreeSet[A] = {
- assert(tree.lookup(elem).isEmpty)
- newSet(size + 1, tree.update(elem, ()))
+ assert(!RB.contains(tree, elem))
+ newSet(RB.update(tree, elem, ()))
}
/** Creates a new `TreeSet` with the entry removed.
@@ -92,31 +131,31 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
* @return a new $coll containing all the elements of this $coll except `elem`.
*/
def - (elem:A): TreeSet[A] =
- if (tree.lookup(elem).isEmpty) this
- else newSet(size - 1, tree delete elem)
+ if (!RB.contains(tree, elem)) this
+ else newSet(RB.delete(tree, elem))
/** Checks if this set contains element `elem`.
*
* @param elem the element to check for membership.
* @return true, iff `elem` is contained in this set.
*/
- def contains(elem: A): Boolean = !tree.lookup(elem).isEmpty
+ def contains(elem: A): Boolean = RB.contains(tree, elem)
/** Creates a new iterator over all elements contained in this
* object.
*
* @return the new iterator
*/
- def iterator: Iterator[A] = tree.toStream.iterator map (_._1)
+ def iterator: Iterator[A] = RB.keysIterator(tree)
- override def toStream: Stream[A] = tree.toStream map (_._1)
+ override def foreach[U](f: A => U) = RB.foreachKey(tree, f)
- override def foreach[U](f: A => U) = tree foreach { (x, y) => f(x) }
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSet(RB.rangeImpl(tree, from, until))
+ override def range(from: A, until: A): TreeSet[A] = newSet(RB.range(tree, from, until))
+ override def from(from: A): TreeSet[A] = newSet(RB.from(tree, from))
+ override def to(to: A): TreeSet[A] = newSet(RB.to(tree, to))
+ override def until(until: A): TreeSet[A] = newSet(RB.until(tree, until))
- override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = {
- val tree = this.tree.range(from, until)
- newSet(tree.count, tree)
- }
- override def firstKey = tree.first
- override def lastKey = tree.last
+ override def firstKey = head
+ override def lastKey = last
}
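TreeSet gains the same tree-based implementations, including the range operations now delegated to RedBlackTree. A short sketch:

    import scala.collection.immutable.TreeSet

    val ts = TreeSet(1, 3, 5, 7)
    ts.range(2, 6)   // TreeSet(3, 5): from is inclusive, until exclusive
    ts.from(5)       // TreeSet(5, 7)
    ts.take(2)       // TreeSet(1, 3)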
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index ba2af8f120..9aea25f330 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -12,9 +12,9 @@ package mutable
/**
* An immutable AVL Tree implementation used by mutable.TreeSet
- *
+ *
* @author Lucien Pereira
- *
+ *
*/
private[mutable] sealed trait AVLTree[+A] extends Serializable {
def balance: Int
@@ -28,28 +28,28 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable {
/**
* Returns a new tree containing the given element.
 * Throws an IllegalArgumentException if the element is already present.
- *
+ *
*/
def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
/**
 * Return a new tree which does not contain the given element.
- *
+ *
*/
def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] =
throw new NoSuchElementException(String.valueOf(value))
-
+
/**
* Return a tuple containing the smallest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
-
+
/**
* Return a tuple containing the biggest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
@@ -90,7 +90,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Returns a new tree containing the given element.
 * Throws an IllegalArgumentException if the element is already present.
- *
+ *
*/
override def insert[B >: A](value: B, ordering: Ordering[B]) = {
val ord = ordering.compare(value, data)
@@ -104,7 +104,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
 * Return a new tree which does not contain the given element.
- *
+ *
*/
override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = {
val ord = ordering.compare(value, data)
@@ -130,7 +130,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Return a tuple containing the smallest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
override def removeMin[B >: A]: (B, AVLTree[B]) = {
if (Leaf == left)
@@ -144,7 +144,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Return a tuple containing the biggest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
override def removeMax[B >: A]: (B, AVLTree[B]) = {
if (Leaf == right)
@@ -154,7 +154,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
(max, Node(data, left, newRight).rebalance)
}
}
-
+
override def rebalance[B >: A] = {
if (-2 == balance) {
if (1 == left.balance)
diff --git a/src/library/scala/collection/mutable/BasicNode.java b/src/library/scala/collection/mutable/BasicNode.java
new file mode 100644
index 0000000000..c05009470a
--- /dev/null
+++ b/src/library/scala/collection/mutable/BasicNode.java
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.mutable;
+
+
+
+
+
+
+public abstract class BasicNode {
+
+ public abstract String string(int lev);
+
+} \ No newline at end of file
diff --git a/src/library/scala/collection/mutable/CNodeBase.java b/src/library/scala/collection/mutable/CNodeBase.java
new file mode 100644
index 0000000000..4374943b8d
--- /dev/null
+++ b/src/library/scala/collection/mutable/CNodeBase.java
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.mutable;
+
+
+
+import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
+
+
+
+abstract class CNodeBase<K, V> extends MainNode<K, V> {
+
+ public static final AtomicIntegerFieldUpdater<CNodeBase> updater = AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize");
+
+ public volatile int csize = -1;
+
+ public boolean CAS_SIZE(int oldval, int nval) {
+ return updater.compareAndSet(this, oldval, nval);
+ }
+
+ public void WRITE_SIZE(int nval) {
+ updater.set(this, nval);
+ }
+
+ public int READ_SIZE() {
+ return updater.get(this);
+ }
+
+} \ No newline at end of file
diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala
new file mode 100644
index 0000000000..cbec118aa9
--- /dev/null
+++ b/src/library/scala/collection/mutable/Ctrie.scala
@@ -0,0 +1,1075 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+
+
+import java.util.concurrent.atomic._
+import collection.immutable.{ ListMap => ImmutableListMap }
+import collection.parallel.mutable.ParCtrie
+import generic._
+import annotation.tailrec
+import annotation.switch
+
+
+
+private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
+ import INodeBase._
+
+ WRITE(bn)
+
+ def this(g: Gen) = this(null, g)
+
+ @inline final def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval)
+
+ @inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
+
+ final def gcasRead(ct: Ctrie[K, V]): MainNode[K, V] = GCAS_READ(ct)
+
+ @inline final def GCAS_READ(ct: Ctrie[K, V]): MainNode[K, V] = {
+ val m = /*READ*/mainnode
+ val prevval = /*READ*/m.prev
+ if (prevval eq null) m
+ else GCAS_Complete(m, ct)
+ }
+
+ @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: Ctrie[K, V]): MainNode[K, V] = if (m eq null) null else {
+ // complete the GCAS
+ val prev = /*READ*/m.prev
+ val ctr = ct.readRoot(true)
+
+ prev match {
+ case null =>
+ m
+ case fn: FailedNode[_, _] => // try to commit to previous value
+ if (CAS(m, fn.prev)) fn.prev
+ else GCAS_Complete(/*READ*/mainnode, ct)
+ case vn: MainNode[_, _] =>
+ // Assume that you've read the root from the generation G.
+ // Assume that the snapshot algorithm is correct.
+ // ==> you can only reach nodes in generations <= G.
+ // ==> `gen` is <= G.
+ // We know that `ctr.gen` is >= G.
+ // ==> if `ctr.gen` = `gen` then they are both equal to G.
+ // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G,
+ // or both
+ if ((ctr.gen eq gen) && ct.nonReadOnly) {
+ // try to commit
+ if (m.CAS_PREV(prev, null)) m
+ else GCAS_Complete(m, ct)
+ } else {
+ // try to abort
+ m.CAS_PREV(prev, new FailedNode(prev))
+ GCAS_Complete(/*READ*/mainnode, ct)
+ }
+ }
+ }
+
+ @inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: Ctrie[K, V]): Boolean = {
+ n.WRITE_PREV(old)
+ if (CAS(old, n)) {
+ GCAS_Complete(n, ct)
+ /*READ*/n.prev eq null
+ } else false
+ }
+
+ @inline private def inode(cn: MainNode[K, V]) = {
+ val nin = new INode[K, V](gen)
+ nin.WRITE(cn)
+ nin
+ }
+
+ final def copyToGen(ngen: Gen, ct: Ctrie[K, V]) = {
+ val nin = new INode[K, V](ngen)
+ val main = GCAS_READ(ct)
+ nin.WRITE(main)
+ nin
+ }
+
+ /** Inserts a key value pair, overwriting the old pair if the keys match.
+ *
+ * @return true if successful, false otherwise
+ */
+ @tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Boolean = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] => // 1) a multiway node
+ val idx = (hc >>> lev) & 0x1f
+ val flag = 1 << idx
+ val bmp = cn.bitmap
+ val mask = flag - 1
+ val pos = Integer.bitCount(bmp & mask)
+ if ((bmp & flag) != 0) {
+ // 1a) insert below
+ cn.array(pos) match {
+ case in: INode[K, V] =>
+ if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct)
+ else false
+ }
+ case sn: SNode[K, V] =>
+ if (sn.hc == hc && sn.k == k) GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)
+ else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
+ GCAS(cn, nn, ct)
+ }
+ }
+ } else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen)
+ GCAS(cn, ncnode, ct)
+ }
+ case tn: TNode[K, V] =>
+ clean(parent, ct, lev - 5)
+ false
+ case ln: LNode[K, V] => // 3) an l-node
+ val nn = ln.inserted(k, v)
+ GCAS(ln, nn, ct)
+ }
+ }
+
+ /** Inserts a new key value pair, given that a specific condition is met.
+ *
+ * @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v`
+ * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key)
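+   * (These four conditions are presumably what the ConcurrentMap operations
+   * put, putIfAbsent, replace(k, v) and replace(k, oldvalue, newvalue) are
+   * built on, respectively.)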
+ */
+ @tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] => // 1) a multiway node
+ val idx = (hc >>> lev) & 0x1f
+ val flag = 1 << idx
+ val bmp = cn.bitmap
+ val mask = flag - 1
+ val pos = Integer.bitCount(bmp & mask)
+ if ((bmp & flag) != 0) {
+ // 1a) insert below
+ cn.array(pos) match {
+ case in: INode[K, V] =>
+ if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct)
+ else null
+ }
+ case sn: SNode[K, V] => cond match {
+ case null =>
+ if (sn.hc == hc && sn.k == k) {
+ if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
+ } else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
+ if (GCAS(cn, nn, ct)) None
+ else null
+ }
+ case INode.KEY_ABSENT =>
+ if (sn.hc == hc && sn.k == k) Some(sn.v)
+ else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
+ if (GCAS(cn, nn, ct)) None
+ else null
+ }
+ case INode.KEY_PRESENT =>
+ if (sn.hc == hc && sn.k == k) {
+ if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
+ } else None
+ case otherv: V =>
+ if (sn.hc == hc && sn.k == k && sn.v == otherv) {
+ if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
+ } else None
+ }
+ }
+ } else cond match {
+ case null | INode.KEY_ABSENT =>
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen)
+ if (GCAS(cn, ncnode, ct)) None else null
+ case INode.KEY_PRESENT => None
+ case otherv: V => None
+ }
+ case sn: TNode[K, V] =>
+ clean(parent, ct, lev - 5)
+ null
+ case ln: LNode[K, V] => // 3) an l-node
+ @inline def insertln() = {
+ val nn = ln.inserted(k, v)
+ GCAS(ln, nn, ct)
+ }
+ cond match {
+ case null =>
+ val optv = ln.get(k)
+ if (insertln()) optv else null
+ case INode.KEY_ABSENT =>
+ ln.get(k) match {
+ case None => if (insertln()) None else null
+ case optv => optv
+ }
+ case INode.KEY_PRESENT =>
+ ln.get(k) match {
+ case Some(v0) => if (insertln()) Some(v0) else null
+ case None => None
+ }
+ case otherv: V =>
+ ln.get(k) match {
+ case Some(v0) if v0 == otherv => if (insertln()) Some(otherv) else null
+ case _ => None
+ }
+ }
+ }
+ }
+
+ /** Looks up the value associated with the key.
+ *
+   * @return null if no value was found, RESTART if the operation needs to be retried, or the bound value otherwise
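+   * (RESTART tells the caller to retry from the root; as the inline comments
+   * below note, it replaces an older scheme that threw a RestartException.)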
+ */
+ @tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): AnyRef = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] => // 1) a multinode
+ val idx = (hc >>> lev) & 0x1f
+ val flag = 1 << idx
+ val bmp = cn.bitmap
+ if ((bmp & flag) == 0) null // 1a) bitmap shows no binding
+ else { // 1b) bitmap contains a value - descend
+ val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1))
+ val sub = cn.array(pos)
+ sub match {
+ case in: INode[K, V] =>
+ if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct)
+ else return RESTART // used to be throw RestartException
+ }
+ case sn: SNode[K, V] => // 2) singleton node
+ if (sn.hc == hc && sn.k == k) sn.v.asInstanceOf[AnyRef]
+ else null
+ }
+ }
+ case tn: TNode[K, V] => // 3) non-live node
+ def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) {
+ clean(parent, ct, lev - 5)
+ RESTART // used to be throw RestartException
+ } else {
+ if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef]
+ else null
+ }
+ cleanReadOnly(tn)
+ case ln: LNode[K, V] => // 5) an l-node
+ ln.get(k).asInstanceOf[Option[AnyRef]].orNull
+ }
+ }
+
+  /** Removes the value associated with the given key.
+   *
+   * @param v if null, removes the binding for the key regardless of its value; otherwise removes the binding only if the key is currently bound to exactly this value
+   * @return null if the operation was unsuccessful, an Option[V] holding the previous value otherwise
+ */
+ final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] =>
+ val idx = (hc >>> lev) & 0x1f
+ val bmp = cn.bitmap
+ val flag = 1 << idx
+ if ((bmp & flag) == 0) None
+ else {
+ val pos = Integer.bitCount(bmp & (flag - 1))
+ val sub = cn.array(pos)
+ val res = sub match {
+ case in: INode[K, V] =>
+ if (startgen eq in.gen) in.rec_remove(k, v, hc, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, hc, lev, parent, startgen, ct)
+ else null
+ }
+ case sn: SNode[K, V] =>
+ if (sn.hc == hc && sn.k == k && (v == null || sn.v == v)) {
+ val ncn = cn.removedAt(pos, flag, gen).toContracted(lev)
+ if (GCAS(cn, ncn, ct)) Some(sn.v) else null
+ } else None
+ }
+
+ if (res == None || (res eq null)) res
+ else {
+ @tailrec def cleanParent(nonlive: AnyRef) {
+ val pm = parent.GCAS_READ(ct)
+ pm match {
+ case cn: CNode[K, V] =>
+ val idx = (hc >>> (lev - 5)) & 0x1f
+ val bmp = cn.bitmap
+ val flag = 1 << idx
+ if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done
+ else {
+ val pos = Integer.bitCount(bmp & (flag - 1))
+ val sub = cn.array(pos)
+ if (sub eq this) nonlive match {
+ case tn: TNode[K, V] =>
+ val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5)
+ if (!parent.GCAS(cn, ncn, ct))
+ if (ct.readRoot().gen == startgen) cleanParent(nonlive)
+ }
+ }
+ case _ => // parent is no longer a cnode, we're done
+ }
+ }
+
+ if (parent ne null) { // never tomb at root
+ val n = GCAS_READ(ct)
+ if (n.isInstanceOf[TNode[_, _]])
+ cleanParent(n)
+ }
+
+ res
+ }
+ }
+ case tn: TNode[K, V] =>
+ clean(parent, ct, lev - 5)
+ null
+ case ln: LNode[K, V] =>
+ if (v == null) {
+ val optv = ln.get(k)
+ val nn = ln.removed(k)
+ if (GCAS(ln, nn, ct)) optv else null
+ } else ln.get(k) match {
+ case optv @ Some(v0) if v0 == v =>
+ val nn = ln.removed(k)
+ if (GCAS(ln, nn, ct)) optv else null
+ case _ => None
+ }
+ }
+ }
+
+ private def clean(nd: INode[K, V], ct: Ctrie[K, V], lev: Int) {
+ val m = nd.GCAS_READ(ct)
+ m match {
+ case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct)
+ case _ =>
+ }
+ }
+
+ final def isNullInode(ct: Ctrie[K, V]) = GCAS_READ(ct) eq null
+
+ final def cachedSize(ct: Ctrie[K, V]): Int = {
+ val m = GCAS_READ(ct)
+ m.cachedSize(ct)
+ }
+
+ /* this is a quiescent method! */
+ def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match {
+ case null => "<null>"
+ case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc)
+ case cn: CNode[_, _] => cn.string(lev)
+ case ln: LNode[_, _] => ln.string(lev)
+ case x => "<elem: %s>".format(x)
+ })
+
+}
+
+
+private[mutable] object INode {
+ val KEY_PRESENT = new AnyRef
+ val KEY_ABSENT = new AnyRef
+
+ def newRootNode[K, V] = {
+ val gen = new Gen
+ val cn = new CNode[K, V](0, new Array(0), gen)
+ new INode[K, V](cn, gen)
+ }
+}
+
+
+private[mutable] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] {
+ WRITE_PREV(p)
+
+ def string(lev: Int) = throw new UnsupportedOperationException
+
+ def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException
+
+ override def toString = "FailedNode(%s)".format(p)
+}
+
+
+private[mutable] trait KVNode[K, V] {
+ def kvPair: (K, V)
+}
+
+
+private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int)
+extends BasicNode with KVNode[K, V] {
+ final def copy = new SNode(k, v, hc)
+ final def copyTombed = new TNode(k, v, hc)
+ final def copyUntombed = new SNode(k, v, hc)
+ final def kvPair = (k, v)
+ final def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc)
+}
+
+
+private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int)
+extends MainNode[K, V] with KVNode[K, V] {
+ final def copy = new TNode(k, v, hc)
+ final def copyTombed = new TNode(k, v, hc)
+ final def copyUntombed = new SNode(k, v, hc)
+ final def kvPair = (k, v)
+ final def cachedSize(ct: AnyRef): Int = 1
+ final def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc)
+}
+
+
+private[collection] final class LNode[K, V](final val listmap: ImmutableListMap[K, V])
+extends MainNode[K, V] {
+ def this(k: K, v: V) = this(ImmutableListMap(k -> v))
+ def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2))
+ def inserted(k: K, v: V) = new LNode(listmap + ((k, v)))
+ def removed(k: K): MainNode[K, V] = {
+ val updmap = listmap - k
+ if (updmap.size > 1) new LNode(updmap)
+ else {
+ val (k, v) = updmap.iterator.next
+ new TNode(k, v, Ctrie.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
+ }
+ }
+ def get(k: K) = listmap.get(k)
+ def cachedSize(ct: AnyRef): Int = listmap.size
+ def string(lev: Int) = (" " * lev) + "LNode(%s)".format(listmap.mkString(", "))
+}
+
+
+private[collection] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen)
+extends CNodeBase[K, V] {
+
+ // this should only be called from within read-only snapshots
+ final def cachedSize(ct: AnyRef) = {
+ val currsz = READ_SIZE()
+ if (currsz != -1) currsz
+ else {
+ val sz = computeSize(ct.asInstanceOf[Ctrie[K, V]])
+ while (READ_SIZE() == -1) CAS_SIZE(-1, sz)
+ READ_SIZE()
+ }
+ }
+
+  // lends itself towards being parallelizable by choosing
+  // a random starting offset in the array
+  // => if there are concurrent size computations, they start
+  //    at different positions, so they are more likely
+  //    to be independent
+ private def computeSize(ct: Ctrie[K, V]): Int = {
+ var i = 0
+ var sz = 0
+ val offset = math.abs(util.Random.nextInt()) % array.length
+ while (i < array.length) {
+ val pos = (i + offset) % array.length
+ array(pos) match {
+ case sn: SNode[_, _] => sz += 1
+ case in: INode[K, V] => sz += in.cachedSize(ct)
+ }
+ i += 1
+ }
+ sz
+ }
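
A worked illustration of the offset trick, with hypothetical numbers:

    // for array.length == 4 and a drawn offset of 2, the loop visits
    // positions 2, 3, 0, 1 -- one full pass that starts where a concurrent
    // size computation most likely does not
    val len = 4; val offset = 2
    val order = (0 until len).map(i => (i + offset) % len)   // Vector(2, 3, 0, 1)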
+
+ final def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = {
+ val len = array.length
+ val narr = new Array[BasicNode](len)
+ Array.copy(array, 0, narr, 0, len)
+ narr(pos) = nn
+ new CNode[K, V](bitmap, narr, gen)
+ }
+
+ final def removedAt(pos: Int, flag: Int, gen: Gen) = {
+ val arr = array
+ val len = arr.length
+ val narr = new Array[BasicNode](len - 1)
+ Array.copy(arr, 0, narr, 0, pos)
+ Array.copy(arr, pos + 1, narr, pos, len - pos - 1)
+ new CNode[K, V](bitmap ^ flag, narr, gen)
+ }
+
+ final def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = {
+ val len = array.length
+ val bmp = bitmap
+ val narr = new Array[BasicNode](len + 1)
+ Array.copy(array, 0, narr, 0, pos)
+ narr(pos) = nn
+ Array.copy(array, pos, narr, pos + 1, len - pos)
+ new CNode[K, V](bmp | flag, narr, gen)
+ }
+
+ /** Returns a copy of this cnode such that all the i-nodes below it are copied
+ * to the specified generation `ngen`.
+ */
+ final def renewed(ngen: Gen, ct: Ctrie[K, V]) = {
+ var i = 0
+ val arr = array
+ val len = arr.length
+ val narr = new Array[BasicNode](len)
+ while (i < len) {
+ arr(i) match {
+ case in: INode[K, V] => narr(i) = in.copyToGen(ngen, ct)
+ case bn: BasicNode => narr(i) = bn
+ }
+ i += 1
+ }
+ new CNode[K, V](bitmap, narr, ngen)
+ }
+
+ private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match {
+ case tn: TNode[_, _] => tn.copyUntombed
+ case _ => inode
+ }
+
+ final def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match {
+ case sn: SNode[K, V] => sn.copyTombed
+ case _ => this
+ } else this
+
+ // - if the branching factor is 1 for this CNode, and the child
+ // is a tombed SNode, returns its tombed version
+ // - otherwise, if there is at least one non-null node below,
+ // returns the version of this node with at least some null-inodes
+ // removed (those existing when the op began)
+ // - if there are only null-i-nodes below, returns null
+ final def toCompressed(ct: Ctrie[K, V], lev: Int, gen: Gen) = {
+ var bmp = bitmap
+ var i = 0
+ val arr = array
+ val tmparray = new Array[BasicNode](arr.length)
+ while (i < arr.length) { // construct new bitmap
+ val sub = arr(i)
+ sub match {
+ case in: INode[K, V] =>
+ val inodemain = in.gcasRead(ct)
+ assert(inodemain ne null)
+ tmparray(i) = resurrect(in, inodemain)
+ case sn: SNode[K, V] =>
+ tmparray(i) = sn
+ }
+ i += 1
+ }
+
+ new CNode[K, V](bmp, tmparray, gen).toContracted(lev)
+ }
+
+ private[mutable] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n"))
+
+ /* quiescently consistent - don't call concurrently to anything involving a GCAS!! */
+ protected def collectElems: Seq[(K, V)] = array flatMap {
+ case sn: SNode[K, V] => Some(sn.kvPair)
+ case in: INode[K, V] => in.mainnode match {
+ case tn: TNode[K, V] => Some(tn.kvPair)
+ case ln: LNode[K, V] => ln.listmap.toList
+ case cn: CNode[K, V] => cn.collectElems
+ }
+ }
+
+ protected def collectLocalElems: Seq[String] = array flatMap {
+ case sn: SNode[K, V] => Some(sn.kvPair._2.toString)
+ case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")")
+ }
+
+ override def toString = {
+ val elems = collectLocalElems
+ "CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", "))
+ }
+}
+
+
+private[mutable] object CNode {
+
+ def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen): MainNode[K, V] = if (lev < 35) {
+ val xidx = (xhc >>> lev) & 0x1f
+ val yidx = (yhc >>> lev) & 0x1f
+ val bmp = (1 << xidx) | (1 << yidx)
+ if (xidx == yidx) {
+ val subinode = new INode[K, V](gen)//(Ctrie.inodeupdater)
+ subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen)
+ new CNode(bmp, Array(subinode), gen)
+ } else {
+ if (xidx < yidx) new CNode(bmp, Array(x, y), gen)
+ else new CNode(bmp, Array(y, x), gen)
+ }
+ } else {
+ new LNode(x.k, x.v, y.k, y.v)
+ }
+
+}
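
A worked example of the index arithmetic in `dual`, using hypothetical hash codes:

    // at lev 0 both keys land in slot (hc >>> 0) & 0x1f == 1, so dual recurses
    val xhc = 0x00000001     // (xhc >>> 0) & 0x1f == 1
    val yhc = 0x00000021     // (yhc >>> 0) & 0x1f == 1  -> xidx == yidx
    // at lev 5, (xhc >>> 5) & 0x1f == 0 while (yhc >>> 5) & 0x1f == 1, so the
    // recursive call separates the keys into a two-entry CNode; only once all
    // 32 hash bits are consumed (lev >= 35) does dual fall back to an LNode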
+
+
+private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) {
+ @volatile var committed = false
+}
+
+
+/** A concurrent hash-trie or Ctrie is a thread-safe, lock-free
+ * implementation of a hash array mapped trie. It is used to implement the
+ * concurrent map abstraction. It has particularly scalable concurrent insert
+ * and remove operations and is memory-efficient. It supports O(1), atomic,
+ * lock-free snapshots which are used to implement linearizable lock-free size,
+ * iterator and clear operations. The cost of evaluating the (lazy) snapshot is
+ * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable.
+ *
+ * For details, see: http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf
+ *
+ * @author Aleksandar Prokopec
+ * @since 2.10
+ */
+@SerialVersionUID(0L - 6402774413839597105L)
+final class Ctrie[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[Ctrie[K, V], AnyRef])
+extends ConcurrentMap[K, V]
+ with MapLike[K, V, Ctrie[K, V]]
+ with CustomParallelizable[(K, V), ParCtrie[K, V]]
+ with Serializable
+{
+ import Ctrie.computeHash
+
+ private var rootupdater = rtupd
+ @volatile var root = r
+
+ def this() = this(
+ INode.newRootNode,
+ AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
+ )
+
+ /* internal methods */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ val it = iterator
+ while (it.hasNext) {
+ val (k, v) = it.next()
+ out.writeObject(k)
+ out.writeObject(v)
+ }
+ out.writeObject(CtrieSerializationEnd)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ root = INode.newRootNode
+ rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
+
+ var obj: AnyRef = null
+ do {
+ obj = in.readObject()
+ if (obj != CtrieSerializationEnd) {
+ val k = obj.asInstanceOf[K]
+ val v = in.readObject().asInstanceOf[V]
+ update(k, v)
+ }
+ } while (obj != CtrieSerializationEnd)
+ }
+
+ @inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
+
+ final def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
+
+ @inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
+ val r = /*READ*/root
+ r match {
+ case in: INode[K, V] => in
+ case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort)
+ }
+ }
+
+ @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = {
+ val v = /*READ*/root
+ v match {
+ case in: INode[K, V] => in
+ case desc: RDCSS_Descriptor[K, V] =>
+ val RDCSS_Descriptor(ov, exp, nv) = desc
+ if (abort) {
+ if (CAS_ROOT(desc, ov)) ov
+ else RDCSS_Complete(abort)
+ } else {
+ val oldmain = ov.gcasRead(this)
+ if (oldmain eq exp) {
+ if (CAS_ROOT(desc, nv)) {
+ desc.committed = true
+ nv
+ } else RDCSS_Complete(abort)
+ } else {
+ if (CAS_ROOT(desc, ov)) ov
+ else RDCSS_Complete(abort)
+ }
+ }
+ }
+ }
+
+ private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = {
+ val desc = RDCSS_Descriptor(ov, expectedmain, nv)
+ if (CAS_ROOT(ov, desc)) {
+ RDCSS_Complete(false)
+ /*READ*/desc.committed
+ } else false
+ }
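
RDCSS_ROOT above is a restricted double-compare single-swap: the root is CASed
to a descriptor, and the swap is committed only if the old root's main node
still equals `expectedmain`. A stripped-down sketch of that idea on a bare
AtomicReference -- the names are hypothetical and, unlike RDCSS_Complete, this
sketch omits the helping that lets other threads finish a descriptor they
encounter at the root:

    import java.util.concurrent.atomic.AtomicReference

    final class Desc[A <: AnyRef](val ov: A, val exp: AnyRef, val nv: A)

    def rdcssSketch[A <: AnyRef](root: AtomicReference[AnyRef], d: Desc[A],
                                 readMain: A => AnyRef): Boolean =
      root.compareAndSet(d.ov, d) && {
        val ok = readMain(d.ov) eq d.exp              // the second compare
        root.compareAndSet(d, if (ok) d.nv else d.ov) // commit or roll back
        ok
      }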
+
+ @tailrec private def inserthc(k: K, hc: Int, v: V) {
+ val r = RDCSS_READ_ROOT()
+ if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v)
+ }
+
+ @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = {
+ val r = RDCSS_READ_ROOT()
+
+ val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this)
+ if (ret eq null) insertifhc(k, hc, v, cond)
+ else ret
+ }
+
+ @tailrec private def lookuphc(k: K, hc: Int): AnyRef = {
+ val r = RDCSS_READ_ROOT()
+ val res = r.rec_lookup(k, hc, 0, null, r.gen, this)
+ if (res eq INodeBase.RESTART) lookuphc(k, hc)
+ else res
+ }
+
+ /* slower:
+ //@tailrec
+ private def lookuphc(k: K, hc: Int): AnyRef = {
+ val r = RDCSS_READ_ROOT()
+ try {
+ r.rec_lookup(k, hc, 0, null, r.gen, this)
+ } catch {
+ case RestartException =>
+ lookuphc(k, hc)
+ }
+ }
+ */
+
+ @tailrec private def removehc(k: K, v: V, hc: Int): Option[V] = {
+ val r = RDCSS_READ_ROOT()
+ val res = r.rec_remove(k, v, hc, 0, null, r.gen, this)
+ if (res ne null) res
+ else removehc(k, v, hc)
+ }
+
+ def string = RDCSS_READ_ROOT().string(0)
+
+ /* public methods */
+
+ override def seq = this
+
+ override def par = new ParCtrie(this)
+
+ override def empty: Ctrie[K, V] = new Ctrie[K, V]
+
+ final def isReadOnly = rootupdater eq null
+
+ final def nonReadOnly = rootupdater ne null
+
+ /** Returns a snapshot of this Ctrie.
+ * This operation is lock-free and linearizable.
+ *
+   * The snapshot is lazily updated - the first time a branch
+   * in the snapshot or in this Ctrie is accessed, it is rewritten.
+ * This means that the work of rebuilding both the snapshot and this
+ * Ctrie is distributed across all the threads doing updates or accesses
+ * subsequent to the snapshot creation.
+ */
+ @tailrec final def snapshot(): Ctrie[K, V] = {
+ val r = RDCSS_READ_ROOT()
+ val expmain = r.gcasRead(this)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r.copyToGen(new Gen, this), rootupdater)
+ else snapshot()
+ }
+
+ /** Returns a read-only snapshot of this Ctrie.
+ * This operation is lock-free and linearizable.
+ *
+   * The snapshot is lazily updated - the first time a branch
+   * of this Ctrie is accessed, it is rewritten. The work of creating
+ * the snapshot is thus distributed across subsequent updates
+ * and accesses on this Ctrie by all threads.
+   * Note that, unlike with the `snapshot` method, the snapshot itself
+   * is never rewritten, but it cannot be modified either.
+ *
+ * This method is used by other methods such as `size` and `iterator`.
+ */
+ @tailrec final def readOnlySnapshot(): collection.Map[K, V] = {
+ val r = RDCSS_READ_ROOT()
+ val expmain = r.gcasRead(this)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r, null)
+ else readOnlySnapshot()
+ }
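
A sketch of what these guarantees buy, as hypothetical driver code over the
public methods defined below:

    val ct = new Ctrie[Int, String]
    ct.update(1, "one")
    val snap = ct.readOnlySnapshot()   // O(1): only the root is swapped
    ct.update(2, "two")                // forces copying just the touched branch
    assert(snap.size == 1)             // the snapshot is unaffected
    assert(ct.size == 2)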
+
+ @tailrec final override def clear() {
+ val r = RDCSS_READ_ROOT()
+ if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear()
+ }
+
+ final def lookup(k: K): V = {
+ val hc = computeHash(k)
+ lookuphc(k, hc).asInstanceOf[V]
+ }
+
+ final override def apply(k: K): V = {
+ val hc = computeHash(k)
+ val res = lookuphc(k, hc)
+ if (res eq null) throw new NoSuchElementException
+ else res.asInstanceOf[V]
+ }
+
+ final def get(k: K): Option[V] = {
+ val hc = computeHash(k)
+ Option(lookuphc(k, hc)).asInstanceOf[Option[V]]
+ }
+
+ override def put(key: K, value: V): Option[V] = {
+ val hc = computeHash(key)
+ insertifhc(key, hc, value, null)
+ }
+
+ final override def update(k: K, v: V) {
+ val hc = computeHash(k)
+ inserthc(k, hc, v)
+ }
+
+ final def +=(kv: (K, V)) = {
+ update(kv._1, kv._2)
+ this
+ }
+
+ final override def remove(k: K): Option[V] = {
+ val hc = computeHash(k)
+ removehc(k, null.asInstanceOf[V], hc)
+ }
+
+ final def -=(k: K) = {
+ remove(k)
+ this
+ }
+
+ def putIfAbsent(k: K, v: V): Option[V] = {
+ val hc = computeHash(k)
+ insertifhc(k, hc, v, INode.KEY_ABSENT)
+ }
+
+ def remove(k: K, v: V): Boolean = {
+ val hc = computeHash(k)
+ removehc(k, v, hc).nonEmpty
+ }
+
+ def replace(k: K, oldvalue: V, newvalue: V): Boolean = {
+ val hc = computeHash(k)
+ insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty
+ }
+
+ def replace(k: K, v: V): Option[V] = {
+ val hc = computeHash(k)
+ insertifhc(k, hc, v, INode.KEY_PRESENT)
+ }
+
+ def iterator: Iterator[(K, V)] =
+ if (nonReadOnly) readOnlySnapshot().iterator
+ else new CtrieIterator(0, this)
+
+ private def cachedSize() = {
+ val r = RDCSS_READ_ROOT()
+ r.cachedSize(this)
+ }
+
+ override def size: Int =
+ if (nonReadOnly) readOnlySnapshot().size
+ else cachedSize()
+
+ override def stringPrefix = "Ctrie"
+
+}
+
+
+object Ctrie extends MutableMapFactory[Ctrie] {
+ val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode")
+
+ implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Ctrie[K, V]] = new MapCanBuildFrom[K, V]
+
+ def empty[K, V]: Ctrie[K, V] = new Ctrie[K, V]
+
+ @inline final def computeHash[K](k: K): Int = {
+ var hcode = k.hashCode
+ hcode = hcode * 0x9e3775cd
+ hcode = java.lang.Integer.reverseBytes(hcode)
+ hcode * 0x9e3775cd
+ }
+
+}
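
Tracing computeHash for a hypothetical key whose hashCode is 1:

    // h0 = 1
    // h1 = h0 * 0x9e3775cd                     == 0x9e3775cd
    // h2 = java.lang.Integer.reverseBytes(h1)  == 0xcd75379e
    // h3 = h2 * 0x9e3775cd
    // the multiply-reverse-multiply round spreads the key's entropy across
    // all 32 bits before the trie consumes them five at a time, level by level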
+
+
+private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
+ var stack = new Array[Array[BasicNode]](7)
+ var stackpos = new Array[Int](7)
+ var depth = -1
+ var subiter: Iterator[(K, V)] = null
+ var current: KVNode[K, V] = null
+
+ if (mustInit) initialize()
+
+ def hasNext = (current ne null) || (subiter ne null)
+
+ def next() = if (hasNext) {
+ var r: (K, V) = null
+ if (subiter ne null) {
+ r = subiter.next()
+ checkSubiter()
+ } else {
+ r = current.kvPair
+ advance()
+ }
+ r
+ } else Iterator.empty.next()
+
+ private def readin(in: INode[K, V]) = in.gcasRead(ct) match {
+ case cn: CNode[K, V] =>
+ depth += 1
+ stack(depth) = cn.array
+ stackpos(depth) = -1
+ advance()
+ case tn: TNode[K, V] =>
+ current = tn
+ case ln: LNode[K, V] =>
+ subiter = ln.listmap.iterator
+ checkSubiter()
+ case null =>
+ current = null
+ }
+
+ @inline private def checkSubiter() = if (!subiter.hasNext) {
+ subiter = null
+ advance()
+ }
+
+ @inline private def initialize() {
+ assert(ct.isReadOnly)
+
+ val r = ct.RDCSS_READ_ROOT()
+ readin(r)
+ }
+
+ def advance(): Unit = if (depth >= 0) {
+ val npos = stackpos(depth) + 1
+ if (npos < stack(depth).length) {
+ stackpos(depth) = npos
+ stack(depth)(npos) match {
+ case sn: SNode[K, V] =>
+ current = sn
+ case in: INode[K, V] =>
+ readin(in)
+ }
+ } else {
+ depth -= 1
+ advance()
+ }
+ } else current = null
+
+ protected def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_lev, _ct, _mustInit)
+
+ protected def dupTo(it: CtrieIterator[K, V]) = {
+ it.level = this.level
+ it.ct = this.ct
+ it.depth = this.depth
+ it.current = this.current
+
+ // these need a deep copy
+ Array.copy(this.stack, 0, it.stack, 0, 7)
+ Array.copy(this.stackpos, 0, it.stackpos, 0, 7)
+
+ // this one needs to be evaluated
+ if (this.subiter == null) it.subiter = null
+ else {
+ val lst = this.subiter.toList
+ this.subiter = lst.iterator
+ it.subiter = lst.iterator
+ }
+ }
+
+ /** Returns a sequence of iterators over subsets of this iterator.
+ * It's used to ease the implementation of splitters for a parallel version of the Ctrie.
+ */
+ protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) {
+ // the case where an LNode is being iterated
+ val it = subiter
+ subiter = null
+ advance()
+ this.level += 1
+ Seq(it, this)
+ } else if (depth == -1) {
+ this.level += 1
+ Seq(this)
+ } else {
+ var d = 0
+ while (d <= depth) {
+ val rem = stack(d).length - 1 - stackpos(d)
+ if (rem > 0) {
+ val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2)
+ stack(d) = arr1
+ stackpos(d) = -1
+ val it = newIterator(level + 1, ct, false)
+ it.stack(0) = arr2
+ it.stackpos(0) = -1
+ it.depth = 0
+ it.advance() // <-- fix it
+ this.level += 1
+ return Seq(this, it)
+ }
+ d += 1
+ }
+ this.level += 1
+ Seq(this)
+ }
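
A concrete splitting step, with hypothetical numbers:

    // at depth d: stack(d).length == 8 and stackpos(d) == 1, so rem == 6;
    // drop(2) yields the 6 unvisited nodes, splitAt(3) hands the first 3 back
    // to this iterator and the remaining 3 to the new iterator, and both
    // resume from stackpos == -1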
+
+ def printDebug {
+ println("ctrie iterator")
+ println(stackpos.mkString(","))
+ println("depth: " + depth)
+ println("curr.: " + current)
+ println(stack.mkString("\n"))
+ }
+
+}
+
+
+private[mutable] object RestartException extends util.control.ControlThrowable
+
+
+/** Only used for ctrie serialization. */
+@SerialVersionUID(0L - 7237891413820527142L)
+private[mutable] case object CtrieSerializationEnd
+
+
+private[mutable] object Debug {
+ import collection._
+
+ lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef]
+
+ def log(s: AnyRef) = logbuffer.add(s)
+
+ def flush() {
+ for (s <- JavaConversions.asScalaIterator(logbuffer.iterator())) Console.out.println(s.toString)
+ logbuffer.clear()
+ }
+
+ def clear() {
+ logbuffer.clear()
+ }
+
+}
+
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index f3fb6738eb..ee6d4d1d22 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -43,19 +43,19 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/** The array keeping track of number of elements in 32 element blocks.
*/
@transient protected var sizemap: Array[Int] = null
-
+
@transient var seedvalue: Int = tableSizeSeed
-
+
import HashTable.powerOfTwo
-
+
protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
-
+
private def initialCapacity = capacity(initialSize)
-
+
protected def randomSeed = seedGenerator.get.nextInt()
-
+
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
-
+
/**
* Initializes the collection from the input stream. `f` will be called for each element
* read from the input stream in the order determined by the stream. This is useful for
@@ -65,22 +65,22 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
*/
private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) {
in.defaultReadObject
-
+
_loadFactor = in.readInt()
assert(_loadFactor > 0)
-
+
val size = in.readInt()
tableSize = 0
assert(size >= 0)
-
+
table = new Array(capacity(sizeForThreshold(size, _loadFactor)))
threshold = newThreshold(_loadFactor, table.size)
-
+
seedvalue = in.readInt()
-
+
val smDefined = in.readBoolean()
if (smDefined) sizeMapInit(table.length) else sizemap = null
-
+
var index = 0
while (index < size) {
val elem = in.readObject().asInstanceOf[A]
@@ -295,12 +295,12 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
protected final def index(hcode: Int) = {
// version 1 (no longer used - did not work with parallel hash tables)
// improve(hcode) & (table.length - 1)
-
+
// version 2 (allows for parallel hash table construction)
val improved = improve(hcode, seedvalue)
val ones = table.length - 1
(improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones
-
+
// version 3 (solves SI-5293 in most cases, but such a case would still arise for parallel hash tables)
// val hc = improve(hcode)
// val bbp = blockbitpos
@@ -345,17 +345,17 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
private[collection] object FlatHashTable {
-
+
/** Creates a specific seed to improve hashcode of a hash table instance
* and ensure that iteration order vulnerabilities are not 'felt' in other
* hash tables.
- *
+ *
* See SI-5293.
*/
final def seedGenerator = new ThreadLocal[util.Random] {
override def initialValue = new util.Random
}
-
+
/** The load factor for the hash table; must be < 500 (0.5)
*/
def defaultLoadFactor: Int = 450
@@ -396,11 +396,11 @@ private[collection] object FlatHashTable {
//h = h ^ (h >>> 14)
//h = h + (h << 4)
//h ^ (h >>> 10)
-
+
var i = hcode * 0x9e3775cd
i = java.lang.Integer.reverseBytes(i)
val improved = i * 0x9e3775cd
-
+
// for the remainder, see SI-5293
// to ensure that different bits are used for different hash tables, we have to rotate based on the seed
val rotation = seed % 32
diff --git a/src/library/scala/collection/mutable/Gen.java b/src/library/scala/collection/mutable/Gen.java
new file mode 100644
index 0000000000..0c9a30d198
--- /dev/null
+++ b/src/library/scala/collection/mutable/Gen.java
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.mutable;
+
+
+
+
+
+
+final class Gen {
+}
+
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index cdf1b78f29..cc0aed6963 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -52,6 +52,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
*/
@transient protected var sizemap: Array[Int] = null
+ @transient var seedvalue: Int = tableSizeSeed
+
+ protected def tableSizeSeed = Integer.bitCount(table.length - 1)
+
protected def initialSize: Int = HashTable.initialSize
private def lastPopulatedIndex = {
@@ -70,14 +74,16 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
private[collection] def init[B](in: java.io.ObjectInputStream, f: (A, B) => Entry) {
in.defaultReadObject
- _loadFactor = in.readInt
+ _loadFactor = in.readInt()
assert(_loadFactor > 0)
- val size = in.readInt
+ val size = in.readInt()
tableSize = 0
assert(size >= 0)
- val smDefined = in.readBoolean
+ seedvalue = in.readInt()
+
+ val smDefined = in.readBoolean()
table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
threshold = newThreshold(_loadFactor, table.size)
@@ -86,7 +92,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
var index = 0
while (index < size) {
- addEntry(f(in.readObject.asInstanceOf[A], in.readObject.asInstanceOf[B]))
+ addEntry(f(in.readObject().asInstanceOf[A], in.readObject().asInstanceOf[B]))
index += 1
}
}
@@ -103,6 +109,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
+ out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
foreachEntry { entry =>
out.writeObject(entry.key)
@@ -314,7 +321,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
// this is of crucial importance when populating the table in parallel
protected final def index(hcode: Int) = {
val ones = table.length - 1
- val improved = improve(hcode)
+ val improved = improve(hcode, seedvalue)
val shifted = (improved >> (32 - java.lang.Integer.bitCount(ones))) & ones
shifted
}
@@ -325,6 +332,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
table = c.table
tableSize = c.tableSize
threshold = c.threshold
+ seedvalue = c.seedvalue
sizemap = c.sizemap
}
if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
@@ -335,6 +343,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
table,
tableSize,
threshold,
+ seedvalue,
sizemap
)
}
@@ -368,7 +377,7 @@ private[collection] object HashTable {
protected def elemHashCode(key: KeyType) = key.##
- protected final def improve(hcode: Int) = {
+ protected final def improve(hcode: Int, seed: Int) = {
/* Murmur hash
* m = 0x5bd1e995
* r = 24
@@ -396,7 +405,7 @@ private[collection] object HashTable {
* */
var i = hcode * 0x9e3775cd
i = java.lang.Integer.reverseBytes(i)
- i * 0x9e3775cd
+ i = i * 0x9e3775cd
// a slower alternative for byte reversal:
// i = (i << 16) | (i >> 16)
// i = ((i >> 8) & 0x00ff00ff) | ((i << 8) & 0xff00ff00)
@@ -420,6 +429,11 @@ private[collection] object HashTable {
// h = h ^ (h >>> 14)
// h = h + (h << 4)
// h ^ (h >>> 10)
+
+ // the rest of the computation is due to SI-5293
+ val rotation = seed % 32
+ val rotated = (i >>> rotation) | (i << (32 - rotation))
+ rotated
}
}
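
A quick illustration of why the seed-dependent rotation decorrelates tables;
the seeds below are hypothetical:

    // the same improved hash value occupies different buckets in two tables
    val i = 0x12345678
    val r5 = (i >>> 5) | (i << 27)   // table whose seed % 32 == 5
    val r9 = (i >>> 9) | (i << 23)   // table whose seed % 32 == 9
    // r5 != r9, so an iteration-order collision in one table is not
    // reproduced in the other -- the SI-5293 scenario
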
@@ -442,6 +456,7 @@ private[collection] object HashTable {
val table: Array[HashEntry[A, Entry]],
val tableSize: Int,
val threshold: Int,
+ val seedvalue: Int,
val sizemap: Array[Int]
) {
import collection.DebugUtils._
@@ -452,6 +467,7 @@ private[collection] object HashTable {
append("Table: [" + arrayString(table, 0, table.length) + "]")
append("Table size: " + tableSize)
append("Load factor: " + loadFactor)
+ append("Seedvalue: " + seedvalue)
append("Threshold: " + threshold)
append("Sizemap: [" + arrayString(sizemap, 0, sizemap.length) + "]")
}
diff --git a/src/library/scala/collection/mutable/INodeBase.java b/src/library/scala/collection/mutable/INodeBase.java
new file mode 100644
index 0000000000..487b5cfc28
--- /dev/null
+++ b/src/library/scala/collection/mutable/INodeBase.java
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.mutable;
+
+
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+
+
+abstract class INodeBase<K, V> extends BasicNode {
+
+ public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode");
+
+ public static final Object RESTART = new Object();
+
+ public volatile MainNode<K, V> mainnode = null;
+
+ public final Gen gen;
+
+ public INodeBase(Gen generation) {
+ gen = generation;
+ }
+
+ public BasicNode prev() {
+ return null;
+ }
+
+} \ No newline at end of file
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 53c876ec08..037f3b2939 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -62,22 +62,22 @@ final class ListBuffer[A]
private var len = 0
protected def underlying: immutable.Seq[A] = start
-
+
private def writeObject(out: ObjectOutputStream) {
// write start
var xs: List[A] = start
while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
out.writeObject(ListSerializeEnd)
-
+
// no need to write last0
-
+
// write if exported
out.writeBoolean(exported)
-
+
// write the length
out.writeInt(len)
}
-
+
private def readObject(in: ObjectInputStream) {
// read start, set last0 appropriately
var elem: A = in.readObject.asInstanceOf[A]
@@ -97,14 +97,14 @@ final class ListBuffer[A]
last0 = current
start
}
-
+
// read if exported
exported = in.readBoolean()
-
+
// read the length
len = in.readInt()
}
-
+
/** The current length of the buffer.
*
* This operation takes constant time.
diff --git a/src/library/scala/collection/mutable/MainNode.java b/src/library/scala/collection/mutable/MainNode.java
new file mode 100644
index 0000000000..0578de676d
--- /dev/null
+++ b/src/library/scala/collection/mutable/MainNode.java
@@ -0,0 +1,40 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.mutable;
+
+
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+
+
+abstract class MainNode<K, V> extends BasicNode {
+
+ public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev");
+
+ public volatile MainNode<K, V> prev = null;
+
+ public abstract int cachedSize(Object ct);
+
+ public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) {
+ return updater.compareAndSet(this, oldval, nval);
+ }
+
+ public void WRITE_PREV(MainNode<K, V> nval) {
+ updater.set(this, nval);
+ }
+
+ // do we need this? unclear in the javadocs...
+ // apparently not - volatile reads are supposed to be safe
+  // regardless of whether there are concurrent ARFU updates
+ public MainNode<K, V> READ_PREV() {
+ return updater.get(this);
+ }
+
+} \ No newline at end of file
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index d87fc0b4a2..f41a51d3ef 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -13,12 +13,12 @@ import generic._
/**
* Base trait for mutable sorted set.
- *
+ *
* @define Coll mutable.SortedSet
* @define coll mutable sorted set
*
* @author Lucien Pereira
- *
+ *
*/
trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike[A,SortedSet[A]]
with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] {
@@ -39,11 +39,11 @@ trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike
* Standard `CanBuildFrom` instance for sorted sets.
*
* @author Lucien Pereira
- *
+ *
*/
object SortedSet extends MutableSortedSetFactory[SortedSet] {
implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
-
+
def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
-
+
}
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index e0f1c3adfe..02ee811193 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -11,14 +11,14 @@ package mutable
import generic._
-/**
+/**
* @define Coll mutable.TreeSet
* @define coll mutable tree set
* @factoryInfo
* Companion object of TreeSet providing factory related utilities.
- *
+ *
* @author Lucien Pereira
- *
+ *
*/
object TreeSet extends MutableSortedSetFactory[TreeSet] {
/**
@@ -32,7 +32,7 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] {
* A mutable SortedSet using an immutable AVL Tree as underlying data structure.
*
* @author Lucien Pereira
- *
+ *
*/
class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with SetLike[A, TreeSet[A]]
with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
@@ -67,7 +67,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
* Cardinality store the set size, unfortunately a
* set view (given by rangeImpl)
* cannot take advantage of this optimisation
- *
+ *
*/
override def size: Int = base.map(_ => super.size).getOrElse(cardinality)
@@ -101,7 +101,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
* Thanks to the immutable nature of the
* underlying AVL Tree, we can share it with
* the clone. So clone complexity in time is O(1).
- *
+ *
*/
override def clone: TreeSet[A] = {
val clone = new TreeSet[A](base, from, until)
@@ -119,5 +119,5 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
override def iterator: Iterator[A] = resolve.avl.iterator
.dropWhile(e => !isLeftAcceptable(from, ordering)(e))
.takeWhile(e => isRightAcceptable(until, ordering)(e))
-
+
}
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index d1453c9ce9..6afe901258 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -33,9 +33,21 @@ import scala.collection.generic.Sizing
* @since 2.9
*/
trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
-//self: EnvironmentPassingCombiner[Elem, To] =>
- private[collection] final val tasksupport = getTaskSupport
-
+
+ @transient
+ @volatile
+ var _combinerTaskSupport = defaultTaskSupport
+
+ def combinerTaskSupport = {
+ val cts = _combinerTaskSupport
+ if (cts eq null) {
+ _combinerTaskSupport = defaultTaskSupport
+ defaultTaskSupport
+ } else cts
+ }
+
+ def combinerTaskSupport_=(cts: TaskSupport) = _combinerTaskSupport = cts
+
/** Combines the contents of the receiver builder and the `other` builder,
* producing a new builder containing both their elements.
*
@@ -63,6 +75,21 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
*/
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo]
+ /** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared
+ * across several threads constructing the collection.
+ *
+ * By default, this method returns `false`.
+ */
+ def canBeShared: Boolean = false
+
+ /** Constructs the result and sets the appropriate tasksupport object to the resulting collection
+ * if this is applicable.
+ */
+ def resultWithTaskSupport: To = {
+ val res = result
+ setTaskSupport(res, combinerTaskSupport)
+ }
+
}
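
A hypothetical combiner that could opt into this hook -- a sketch only,
assuming a builder backed by a concurrent map along the lines of the Ctrie
added in this commit, not code from the patch:

    class SharedKVCombiner[K, V] extends Combiner[(K, V), Ctrie[K, V]] {
      val ct = new Ctrie[K, V]
      def +=(kv: (K, V)) = { ct.update(kv._1, kv._2); this }  // thread-safe +=
      def result = ct
      def clear() { ct.clear() }
      def size = ct.size
      // tasks share a single instance, so combine only ever sees `this`
      def combine[N <: (K, V), NewTo >: Ctrie[K, V]](other: Combiner[N, NewTo]) =
        if (other eq this) this else throw new UnsupportedOperationException
      override def canBeShared = true
    }
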
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 390bd72ab5..5e6bf8c1a3 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -28,7 +28,7 @@ import immutable.HashMapCombiner
import java.util.concurrent.atomic.AtomicBoolean
import annotation.unchecked.uncheckedVariance
-
+import annotation.unchecked.uncheckedStable
/** A template trait for parallel collections of type `ParIterable[T]`.
@@ -96,17 +96,6 @@ import annotation.unchecked.uncheckedVariance
* The combination of methods `toMap`, `toSeq` or `toSet` along with `par` and `seq` is a flexible
* way to change between different collection types.
*
- * The method:
- *
- * {{{
- * def threshold(sz: Int, p: Int): Int
- * }}}
- *
- * provides an estimate on the minimum number of elements the collection has before
- * the splitting stops and depends on the number of elements in the collection. A rule of the
- * thumb is the number of elements divided by 8 times the parallelism level. This method may
- * be overridden in concrete implementations if necessary.
- *
* Since this trait extends the `Iterable` trait, methods like `size` must also
* be implemented in concrete collections, while `iterator` forwards to `splitter` by
* default.
@@ -165,49 +154,25 @@ extends GenIterableLike[T, Repr]
with HasNewCombiner[T, Repr]
{
self: ParIterableLike[T, Repr, Sequential] =>
-
- import tasksupport._
-
+
+ @transient
+ @volatile
+ private var _tasksupport = defaultTaskSupport
+
+ def tasksupport = {
+ val ts = _tasksupport
+ if (ts eq null) {
+ _tasksupport = defaultTaskSupport
+ defaultTaskSupport
+ } else ts
+ }
+
+ def tasksupport_=(ts: TaskSupport) = _tasksupport = ts
+
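
With the getter/setter pair above, each parallel collection instance can be
pointed at its own scheduler. A hedged usage sketch; ForkJoinTaskSupport is
assumed to exist as in later 2.10 builds:

    val pc = (1 to 1000).par
    pc.tasksupport = new ForkJoinTaskSupport(
      new scala.concurrent.forkjoin.ForkJoinPool(2))  // cap at two workers
    pc.map(_ * 2)                                     // runs on the custom pool
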
def seq: Sequential
def repr: Repr = this.asInstanceOf[Repr]
- /** Parallel iterators are split iterators that have additional accessor and
- * transformer methods defined in terms of methods `next` and `hasNext`.
- * When creating a new parallel collection, one might want to override these
- * new methods to make them more efficient.
- *
- * Parallel iterators are augmented with signalling capabilities. This means
- * that a signalling object can be assigned to them as needed.
- *
- * The self-type ensures that signal context passing behaviour gets mixed in
- * a concrete object instance.
- */
- trait ParIterator extends IterableSplitter[T] {
- me: SignalContextPassingIterator[ParIterator] =>
- var signalDelegate: Signalling = IdleSignalling
- def repr = self.repr
- def split: Seq[IterableSplitter[T]]
- }
-
- /** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParIterator` ensures that this trait gets mixed into
- * concrete iterators.
- */
- trait SignalContextPassingIterator[+IterRepr <: ParIterator] extends ParIterator {
- // Note: This functionality must be factored out to this inner trait to avoid boilerplate.
- // Also, one could omit the cast below. However, this leads to return type inconsistencies,
- // due to inability to override the return type of _abstract overrides_.
- // Be aware that this stackable modification has to be subclassed, so it shouldn't be rigid
- // on the type of iterators it splits.
- // The alternative is some boilerplate - better to tradeoff some type safety to avoid it here.
- abstract override def split: Seq[IterRepr] = {
- val pits = super.split
- pits foreach { _.signalDelegate = signalDelegate }
- pits.asInstanceOf[Seq[IterRepr]]
- }
- }
-
def hasDefiniteSize = true
def nonEmpty = size != 0
@@ -242,18 +207,6 @@ self: ParIterableLike[T, Repr, Sequential] =>
*/
def isStrictSplitterCollection = true
- /** Some minimal number of elements after which this collection should be handled
- * sequentially by different processors.
- *
- * This method depends on the size of the collection and the parallelism level, which
- * are both specified as arguments.
- *
- * @param sz the size based on which to compute the threshold
- * @param p the parallelism level based on which to compute the threshold
- * @return the maximum number of elements for performing operations sequentially
- */
- def threshold(sz: Int, p: Int): Int = thresholdFromSize(sz, p)
-
/** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool.
* This method forwards the call to `newCombiner`.
*/
@@ -365,7 +318,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* if this $coll is empty.
*/
def reduce[U >: T](op: (U, U) => U): U = {
- executeAndWaitResult(new Reduce(op, splitter) mapResult { _.get })
+ tasksupport.executeAndWaitResult(new Reduce(op, splitter) mapResult { _.get })
}
/** Optionally reduces the elements of this sequence using the specified associative binary operator.
@@ -400,7 +353,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return the result of applying fold operator `op` between all the elements and `z`
*/
def fold[U >: T](z: U)(op: (U, U) => U): U = {
- executeAndWaitResult(new Fold(z, op, splitter))
+ tasksupport.executeAndWaitResult(new Fold(z, op, splitter))
}
/** Aggregates the results of applying an operator to subsequent elements.
@@ -432,13 +385,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param combop an associative operator used to combine results from different partitions
*/
def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
- executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
+ tasksupport.executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
}
- def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op)
-
- def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op)
-
def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op)
@@ -451,47 +400,33 @@ self: ParIterableLike[T, Repr, Sequential] =>
def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op)
- /*
- /** Applies a function `f` to all the elements of $coll. Does so in a nondefined order,
- * and in parallel.
- *
- * $undefinedorder
- *
- * @tparam U the result type of the function applied to each element, which is always discarded
- * @param f function applied to each element
- */
- def pareach[U](f: T => U): Unit = {
- executeAndWaitResult(new Foreach(f, splitter))
- }
- */
-
/** Applies a function `f` to all the elements of $coll in a sequential order.
*
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
*/
def foreach[U](f: T => U) = {
- executeAndWaitResult(new Foreach(f, splitter))
+ tasksupport.executeAndWaitResult(new Foreach(f, splitter))
}
def count(p: T => Boolean): Int = {
- executeAndWaitResult(new Count(p, splitter))
+ tasksupport.executeAndWaitResult(new Count(p, splitter))
}
def sum[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Sum[U](num, splitter))
+ tasksupport.executeAndWaitResult(new Sum[U](num, splitter))
}
def product[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Product[U](num, splitter))
+ tasksupport.executeAndWaitResult(new Product[U](num, splitter))
}
def min[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Min(ord, splitter) mapResult { _.get }).asInstanceOf[T]
+ tasksupport.executeAndWaitResult(new Min(ord, splitter) mapResult { _.get }).asInstanceOf[T]
}
def max[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Max(ord, splitter) mapResult { _.get }).asInstanceOf[T]
+ tasksupport.executeAndWaitResult(new Max(ord, splitter) mapResult { _.get }).asInstanceOf[T]
}
def maxBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
@@ -507,24 +442,24 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
- executeAndWaitResult(new Map[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.result })
- } else seq.map(f)(bf2seq(bf))
+ tasksupport.executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport })
+ } else setTaskSupport(seq.map(f)(bf2seq(bf)), tasksupport)
/*bf ifParallel { pbf =>
- executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result })
} otherwise seq.map(f)(bf2seq(bf))*/
def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
- executeAndWaitResult(new Collect[S, That](pf, () => bf(repr).asCombiner, splitter) mapResult { _.result })
- } else seq.collect(pf)(bf2seq(bf))
+ tasksupport.executeAndWaitResult(new Collect[S, That](pf, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport })
+ } else setTaskSupport(seq.collect(pf)(bf2seq(bf)), tasksupport)
/*bf ifParallel { pbf =>
- executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result })
} otherwise seq.collect(pf)(bf2seq(bf))*/
def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
- executeAndWaitResult(new FlatMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.result })
- } else seq.flatMap(f)(bf2seq(bf))
+ tasksupport.executeAndWaitResult(new FlatMap[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport })
+ } else setTaskSupport(seq.flatMap(f)(bf2seq(bf)), tasksupport)
/*bf ifParallel { pbf =>
- executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result })
} otherwise seq.flatMap(f)(bf2seq(bf))*/
/** Tests whether a predicate holds for all elements of this $coll.
@@ -535,7 +470,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return true if `p` holds for all elements, false otherwise
*/
def forall(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Forall(pred, splitter assign new DefaultSignalling with VolatileAbort))
+ tasksupport.executeAndWaitResult(new Forall(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
/** Tests whether a predicate holds for some element of this $coll.
@@ -546,7 +481,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return true if `p` holds for some element, false otherwise
*/
def exists(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Exists(pred, splitter assign new DefaultSignalling with VolatileAbort))
+ tasksupport.executeAndWaitResult(new Exists(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
/** Finds some element in the collection for which the predicate holds, if such
@@ -561,19 +496,52 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return an option value with the element if such an element exists, or `None` otherwise
*/
def find(pred: T => Boolean): Option[T] = {
- executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
+ tasksupport.executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
+ }
+
+ /** Creates a combiner factory. Each combiner factory instance is used
+ * once per invocation of a parallel transformer method for a single
+ * collection.
+ *
+ * The default combiner factory creates a new combiner every time it
+ * is requested, unless the combiner is thread-safe as indicated by its
+ * `canBeShared` method. In this case, the method returns a factory which
+ * returns the same combiner each time. This is typically done for
+ * concurrent parallel collections, the combiners of which allow
+ * thread safe access.
+ */
+ protected[this] def combinerFactory = {
+ val combiner = newCombiner
+ combiner.combinerTaskSupport = tasksupport
+ if (combiner.canBeShared) new CombinerFactory[T, Repr] {
+ val shared = combiner
+ def apply() = shared
+ def doesShareCombiners = true
+ } else new CombinerFactory[T, Repr] {
+ def apply() = newCombiner
+ def doesShareCombiners = false
+ }
}
- protected[this] def cbfactory ={
- () => newCombiner
+ protected[this] def combinerFactory[S, That](cbf: () => Combiner[S, That]) = {
+ val combiner = cbf()
+ combiner.combinerTaskSupport = tasksupport
+ if (combiner.canBeShared) new CombinerFactory[S, That] {
+ val shared = combiner
+ def apply() = shared
+ def doesShareCombiners = true
+ } else new CombinerFactory[S, That] {
+ def apply() = cbf()
+ def doesShareCombiners = false
+ }
}
def filter(pred: T => Boolean): Repr = {
- executeAndWaitResult(new Filter(pred, cbfactory, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport })
}
def filterNot(pred: T => Boolean): Repr = {
- executeAndWaitResult(new FilterNot(pred, cbfactory, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new FilterNot(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport })
}
def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = {
@@ -581,49 +549,54 @@ self: ParIterableLike[T, Repr, Sequential] =>
// println("case both are parallel")
val other = that.asParIterable
val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), splitter)
+ val cfactory = combinerFactory(() => pbf(repr))
+ val copythis = new Copy(cfactory, splitter)
val copythat = wrap {
- val othtask = new other.Copy(() => pbf(self.repr), other.splitter)
+ val othtask = new other.Copy(cfactory, other.splitter)
tasksupport.executeAndWaitResult(othtask)
}
val task = (copythis parallel copythat) { _ combine _ } mapResult {
- _.result
+ _.resultWithTaskSupport
}
- executeAndWaitResult(task)
- } else if (bf.isParallel) {
+ tasksupport.executeAndWaitResult(task)
+ } else if (bf(repr).isCombiner) {
// println("case parallel builder, `that` not parallel")
- val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), splitter)
+ val copythis = new Copy(combinerFactory(() => bf(repr).asCombiner), splitter)
val copythat = wrap {
- val cb = pbf(repr)
+ val cb = bf(repr).asCombiner
for (elem <- that.seq) cb += elem
cb
}
- executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.result })
+ tasksupport.executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.resultWithTaskSupport })
} else {
// println("case not a parallel builder")
val b = bf(repr)
this.splitter.copy2builder[U, That, Builder[U, That]](b)
for (elem <- that.seq) b += elem
- b.result
+ setTaskSupport(b.result, tasksupport)
}
}
def partition(pred: T => Boolean): (Repr, Repr) = {
- executeAndWaitResult(new Partition(pred, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
+ tasksupport.executeAndWaitResult(
+ new Partition(pred, combinerFactory, combinerFactory, splitter) mapResult {
+ p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport)
+ }
+ )
}
def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = {
- executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult {
- rcb => rcb.groupByKey(cbfactory)
+ val r = tasksupport.executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult {
+ rcb => rcb.groupByKey(() => combinerFactory())
})
+ setTaskSupport(r, tasksupport)
}
def take(n: Int): Repr = {
val actualn = if (size > n) n else size
if (actualn < MIN_FOR_COPY) take_sequential(actualn)
- else executeAndWaitResult(new Take(actualn, cbfactory, splitter) mapResult {
- _.result
+ else tasksupport.executeAndWaitResult(new Take(actualn, combinerFactory, splitter) mapResult {
+ _.resultWithTaskSupport
})
}
@@ -636,13 +609,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
cb += it.next
left -= 1
}
- cb.result
+ cb.resultWithTaskSupport
}
def drop(n: Int): Repr = {
val actualn = if (size > n) n else size
if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn)
- else executeAndWaitResult(new Drop(actualn, cbfactory, splitter) mapResult { _.result })
+ else tasksupport.executeAndWaitResult(new Drop(actualn, combinerFactory, splitter) mapResult { _.resultWithTaskSupport })
}
private def drop_sequential(n: Int) = {
@@ -650,14 +623,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
val cb = newCombiner
cb.sizeHint(size - n)
while (it.hasNext) cb += it.next
- cb.result
+ cb.resultWithTaskSupport
}
override def slice(unc_from: Int, unc_until: Int): Repr = {
val from = unc_from min size max 0
val until = unc_until min size max from
if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until)
- else executeAndWaitResult(new Slice(from, until, cbfactory, splitter) mapResult { _.result })
+ else tasksupport.executeAndWaitResult(new Slice(from, until, combinerFactory, splitter) mapResult { _.resultWithTaskSupport })
}
private def slice_sequential(from: Int, until: Int): Repr = {
@@ -668,11 +641,15 @@ self: ParIterableLike[T, Repr, Sequential] =>
cb += it.next
left -= 1
}
- cb.result
+ cb.resultWithTaskSupport
}
def splitAt(n: Int): (Repr, Repr) = {
- executeAndWaitResult(new SplitAt(n, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
+ tasksupport.executeAndWaitResult(
+ new SplitAt(n, combinerFactory, combinerFactory, splitter) mapResult {
+ p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport)
+ }
+ )
}
/** Computes a prefix scan of the elements of the collection.
@@ -690,20 +667,19 @@ self: ParIterableLike[T, Repr, Sequential] =>
*
* @return a new $coll containing the prefix scan of the elements in this $coll
*/
- def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf.isParallel) {
- val cbf = bf.asParallel
- if (parallelismLevel > 1) {
- if (size > 0) executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult {
- tree => executeAndWaitResult(new FromScanTree(tree, z, op, cbf) mapResult {
- cb => cb.result
+ def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
+ if (tasksupport.parallelismLevel > 1) {
+ if (size > 0) tasksupport.executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult {
+ tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult {
+ cb => cb.resultWithTaskSupport
})
- }) else (cbf(self.repr) += z).result
- } else seq.scan(z)(op)(bf2seq(bf))
- } else seq.scan(z)(op)(bf2seq(bf))
+ }) else setTaskSupport((bf(repr) += z).result, tasksupport)
+ } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
+ } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
- def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = seq.scanLeft(z)(op)(bf2seq(bf))
+ def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanLeft(z)(op)(bf2seq(bf)), tasksupport)
- def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = seq.scanRight(z)(op)(bf2seq(bf))
+ def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanRight(z)(op)(bf2seq(bf)), tasksupport)
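A quick sketch of the scan semantics implemented above (two parallel passes: build a scan tree, then fill combiners from it); a ParVector receiver is assumed:

    import scala.collection.parallel.immutable.ParVector
    ParVector(1, 2, 3, 4).scan(0)(_ + _)
    // == ParVector(0, 1, 3, 6, 10): z first, then every prefix sum,
    // computed in parallel only when tasksupport.parallelismLevel > 1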
/** Takes the longest prefix of elements that satisfy the predicate.
*
@@ -714,9 +690,19 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return the longest prefix of this $coll of elements that satisfy the predicate `pred`

*/
def takeWhile(pred: T => Boolean): Repr = {
- val cntx = new DefaultSignalling with AtomicIndexFlag
- cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new TakeWhile(0, pred, cbfactory, splitter assign cntx) mapResult { _._1.result })
+ val cbf = combinerFactory
+ if (cbf.doesShareCombiners) {
+ val parseqspan = toSeq.takeWhile(pred)
+ tasksupport.executeAndWaitResult(new Copy(combinerFactory, parseqspan.splitter) mapResult {
+ _.resultWithTaskSupport
+ })
+ } else {
+ val cntx = new DefaultSignalling with AtomicIndexFlag
+ cntx.setIndexFlag(Int.MaxValue)
+ tasksupport.executeAndWaitResult(new TakeWhile(0, pred, combinerFactory, splitter assign cntx) mapResult {
+ _._1.resultWithTaskSupport
+ })
+ }
}
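Illustratively (ParVector assumed), the atomic index flag lets subtasks past the first failing element stop early:

    import scala.collection.parallel.immutable.ParVector
    ParVector(1, 2, 3, 10, 4, 5).takeWhile(_ < 10)
    // == ParVector(1, 2, 3); subtasks beyond index 3 observe the lowered
    // index flag through the signalling context and stop copying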
/** Splits this $coll into a prefix/suffix pair according to a predicate.
@@ -729,11 +715,22 @@ self: ParIterableLike[T, Repr, Sequential] =>
* the elements satisfy `pred`, and the rest of the collection
*/
def span(pred: T => Boolean): (Repr, Repr) = {
- val cntx = new DefaultSignalling with AtomicIndexFlag
- cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult {
- p => (p._1.result, p._2.result)
- })
+ val cbf = combinerFactory
+ if (cbf.doesShareCombiners) {
+ val (xs, ys) = toSeq.span(pred)
+ val copyxs = new Copy(combinerFactory, xs.splitter) mapResult { _.resultWithTaskSupport }
+ val copyys = new Copy(combinerFactory, ys.splitter) mapResult { _.resultWithTaskSupport }
+ val copyall = (copyxs parallel copyys) {
+ (xr, yr) => (xr, yr)
+ }
+ tasksupport.executeAndWaitResult(copyall)
+ } else {
+ val cntx = new DefaultSignalling with AtomicIndexFlag
+ cntx.setIndexFlag(Int.MaxValue)
+ tasksupport.executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult {
+ p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport)
+ })
+ }
}
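The same signalling machinery drives span; a small usage sketch (ParVector assumed):

    import scala.collection.parallel.immutable.ParVector
    ParVector(1, 2, 3, 10, 4).span(_ < 10)
    // == (ParVector(1, 2, 3), ParVector(10, 4)): the prefix of elements
    // satisfying the predicate, and everything from the first failure on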
/** Drops all elements in the longest prefix of elements that satisfy the predicate,
@@ -749,7 +746,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
def dropWhile(pred: T => Boolean): Repr = {
val cntx = new DefaultSignalling with AtomicIndexFlag
cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult { _._2.result })
+ tasksupport.executeAndWaitResult(
+ new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult {
+ _._2.resultWithTaskSupport
+ }
+ )
}
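dropWhile reuses the Span task and keeps only the second combiner, so the following identity holds (hypothetical xs):

    val xs = scala.collection.parallel.immutable.ParVector(1, 2, 3, 10, 4)
    assert(xs.dropWhile(_ < 10) == xs.span(_ < 10)._2) // == ParVector(10, 4)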
def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0)
@@ -757,31 +758,33 @@ self: ParIterableLike[T, Repr, Sequential] =>
def copyToArray[U >: T](xs: Array[U], start: Int) = copyToArray(xs, start, xs.length - start)
def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) {
- executeAndWaitResult(new CopyToArray(start, len, xs, splitter))
+ tasksupport.executeAndWaitResult(new CopyToArray(start, len, xs, splitter))
}
def sameElements[U >: T](that: GenIterable[U]) = seq.sameElements(that)
- def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
- val pbf = bf.asParallel
+ def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
- executeAndWaitResult(new Zip(pbf, splitter, thatseq.splitter) mapResult { _.result });
- } else seq.zip(that)(bf2seq(bf))
+ tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport });
+ } else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport)
def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
- def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
- val pbf = bf.asParallel
+ def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
- executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, pbf, splitter, thatseq.splitter) mapResult { _.result });
- } else seq.zipAll(that, thisElem, thatElem)(bf2seq(bf))
+ tasksupport.executeAndWaitResult(
+ new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
+ _.resultWithTaskSupport
+ }
+ );
+ } else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport)
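Sketching the zipAll semantics above (the combiner path is taken only when the builder factory yields a combiner and `that` is a parallel sequence):

    import scala.collection.parallel.immutable.ParVector
    ParVector(1, 2, 3).zipAll(ParVector('a', 'b'), 0, 'z')
    // == ParVector((1,'a'), (2,'b'), (3,'z')): the shorter side is padded
    // with thatElem ('z' here); thisElem (0) would pad if `that` were longer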
protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = {
- executeAndWaitResult(new ToParCollection(cbf, splitter) mapResult { _.result });
+ tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport });
}
protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = {
- executeAndWaitResult(new ToParMap(cbf, splitter)(ev) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.resultWithTaskSupport })
}
def view = new ParIterableView[T, Repr, Sequential] {
@@ -838,8 +841,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends StrictSplitterCheckTask[R, Tp] {
protected[this] val pit: IterableSplitter[T]
protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp]
- def shouldSplitFurther = pit.remaining > threshold(size, parallelismLevel)
- def split = pit.split.map(newSubtask(_)) // default split procedure
+ def shouldSplitFurther = pit.shouldSplitFurther(self.repr, tasksupport.parallelismLevel)
+ def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure
private[parallel] override def signalAbort = pit.abort
override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")"
}
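The split test above now delegates to the splitter. As a rough standalone sketch of the threshold arithmetic (the real `thresholdFromSize` lives in the `scala.collection.parallel` package object; the exact formula below is an assumption, not taken from this diff):

    // keep splitting while a chunk still holds more than roughly size/(8*p) elements
    def shouldSplit(remaining: Int, collectionSize: Int, parallelism: Int): Boolean =
      remaining > math.max(collectionSize / (8 * parallelism), 1)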
@@ -869,22 +872,22 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** Sequentially performs one task after another. */
protected[this] abstract class SeqComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]]
- (f: First, s: Second)
+ (f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
- executeAndWaitResult(ft)
- executeAndWaitResult(st)
+ tasksupport.executeAndWaitResult(ft)
+ tasksupport.executeAndWaitResult(st)
mergeSubtasks
}
}
/** Performs two tasks in parallel, and waits for both to finish. */
protected[this] abstract class ParComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]]
- (f: First, s: Second)
+ (f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
- val ftfuture = execute(ft)
- executeAndWaitResult(st)
+ val ftfuture = tasksupport.execute(ft)
+ tasksupport.executeAndWaitResult(st)
ftfuture()
mergeSubtasks
}
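The two composites differ only in scheduling: SeqComposite runs the subtasks back to back, while ParComposite forks the first and joins it after running the second inline. A self-contained sketch of that pattern (MiniTaskSupport is a made-up stand-in for the real Tasks interface):

    trait MiniTaskSupport {
      def execute[R](body: () => R): () => R // schedule; returns a blocking join
    }
    def runBoth[A, B](ts: MiniTaskSupport)(fa: () => A, fb: () => B): (A, B) = {
      val futureA = ts.execute(fa) // fork the first task
      val b = fb()                 // run the second on the current thread
      (futureA(), b)               // join, then merge the results
    }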
@@ -895,7 +898,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
@volatile var result: R1 = null.asInstanceOf[R1]
def map(r: R): R1
def leaf(prevr: Option[R1]) = {
- val initialResult = executeAndWaitResult(inner)
+ val initialResult = tasksupport.executeAndWaitResult(inner)
result = map(initialResult)
}
private[parallel] override def signalAbort() {
@@ -906,13 +909,15 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected trait Transformer[R, Tp] extends Accessor[R, Tp]
- protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) extends Accessor[Unit, Foreach[S]] {
+ protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Unit, Foreach[S]] {
@volatile var result: Unit = ()
def leaf(prevr: Option[Unit]) = pit.foreach(op)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Foreach[S](op, p)
}
- protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Int, Count] {
+ protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Int, Count] {
// val pittxt = pit.toString
@volatile var result: Int = 0
def leaf(prevr: Option[Int]) = result = pit.count(pred)
@@ -921,7 +926,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
// override def toString = "CountTask(" + pittxt + ")"
}
- protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Reduce[U]] {
+ protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Reduce[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.reduce(op))
protected[this] def newSubtask(p: IterableSplitter[T]) = new Reduce(op, p)
@@ -931,7 +937,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Fold[U]] {
+ protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[U, Fold[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.fold(z)(op)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Fold(z, op, p)
@@ -946,21 +953,24 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Aggregate[S]) = result = combop(result, that.result)
}
- protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Sum[U]] {
+ protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[U, Sum[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.sum(num)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Sum(num, p)
override def merge(that: Sum[U]) = result = num.plus(result, that.result)
}
- protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Product[U]] {
+ protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[U, Product[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.product(num)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Product(num, p)
override def merge(that: Product[U]) = result = num.times(result, that.result)
}
- protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Min[U]] {
+ protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Min[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.min(ord))
protected[this] def newSubtask(p: IterableSplitter[T]) = new Min(ord, p)
@@ -970,7 +980,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Max[U]] {
+ protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Max[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.max(ord))
protected[this] def newSubtask(p: IterableSplitter[T]) = new Max(ord, p)
@@ -980,16 +991,16 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Map[S, That](f: T => S, pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Map[S, That](f: T => S, cbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Map[S, That]] {
@volatile var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, pbf()))
- protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, pbf, p)
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, cbf()))
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, cbf, p)
override def merge(that: Map[S, That]) = result = result combine that.result
}
protected[this] class Collect[S, That]
- (pf: PartialFunction[T, S], pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T])
+ (pf: PartialFunction[T, S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Collect[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf())
@@ -998,7 +1009,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class FlatMap[S, That]
- (f: T => GenTraversableOnce[S], pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T])
+ (f: T => GenTraversableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], FlatMap[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf())
@@ -1010,28 +1021,31 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Forall] {
+ protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Boolean, Forall] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p)
override def merge(that: Forall) = result = result && that.result
}
- protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Exists] {
+ protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Boolean, Exists] {
@volatile var result: Boolean = false
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p)
override def merge(that: Exists) = result = result || that.result
}
- protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Find[U]] {
+ protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Find[U]] {
@volatile var result: Option[U] = None
def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p)
override def merge(that: Find[U]) = if (this.result == None) result = that.result
}
- protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Filter[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
@@ -1041,7 +1055,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Filter[U, This]) = result = result combine that.result
}
- protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], FilterNot[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
@@ -1051,7 +1065,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: FilterNot[U, This]) = result = result combine that.result
}
- protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
+ protected class Copy[U >: T, That](cfactory: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], Copy[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory()))
@@ -1059,11 +1073,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Copy[U, That]) = result = result combine that.result
}
- protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Partition[U >: T, This >: Repr]
+ (pred: T => Boolean, cbfTrue: CombinerFactory[U, This], cbfFalse: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbf, p)
+ def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbfTrue()), reuse(prev.map(_._2), cbfFalse()))
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbfTrue, cbfFalse, p)
override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
}
@@ -1090,7 +1105,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Take[U >: T, This >: Repr]
+ (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Take[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
@@ -1098,7 +1114,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
for ((p, untilp) <- pits zip sizes; if untilp <= n) yield {
if (untilp + p.remaining < n) new Take(p.remaining, cbf, p)
@@ -1109,13 +1125,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
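The Take/Drop/Slice splits all use the same prefix-sum bookkeeping over chunk sizes; an isolated sketch of the arithmetic behind Take.split (names hypothetical):

    // Given child chunk sizes, keep only chunks intersecting [0, n) and
    // compute how many elements to take from each, mirroring the yield above.
    def takePlan(chunkSizes: Seq[Int], n: Int): Seq[(Int, Int)] = {
      val offsets = chunkSizes.scanLeft(0)(_ + _) // start offset of each chunk
      for ((size, start) <- chunkSizes zip offsets; if start < n)
        yield (start, size min (n - start))       // (chunk start, count taken)
    }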
- protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Drop[U >: T, This >: Repr]
+ (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Drop[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf()))
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
for ((p, withp) <- pits zip sizes.tail; if withp >= n) yield {
if (withp - p.remaining > n) new Drop(0, cbf, p)
@@ -1126,13 +1143,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Slice[U >: T, This >: Repr]
+ (from: Int, until: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Slice[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf()))
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
for ((p, untilp) <- pits zip sizes; if untilp + p.remaining >= from || untilp <= until) yield {
val f = (from max untilp) - untilp
@@ -1144,22 +1162,23 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class SplitAt[U >: T, This >: Repr]
+ (at: Int, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
+ def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbfBefore()), reuse(prev.map(_._2), cbfAfter()))
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
- for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbf, p)
+ for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbfBefore, cbfAfter, p)
}
override def merge(that: SplitAt[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
override def requiresStrictSplitters = true
}
protected[this] class TakeWhile[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ (pos: Int, pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] {
@volatile var result: (Combiner[U, This], Boolean) = null
def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) {
@@ -1168,7 +1187,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else result = (reuse(prev.map(_._1), cbf()), false)
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p)
}
override def merge(that: TakeWhile[U, This]) = if (result._2) {
@@ -1178,23 +1197,23 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class Span[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ (pos: Int, pred: T => Boolean, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) {
// val lst = pit.toList
// val pa = mutable.ParArray(lst: _*)
// val str = "At leaf we will iterate: " + pa.splitter.toList
- result = pit.span2combiners(pred, cbf(), cbf()) // do NOT reuse old combiners here, lest ye be surprised
+ result = pit.span2combiners(pred, cbfBefore(), cbfAfter()) // do NOT reuse old combiners here, lest ye be surprised
// println("\nAt leaf result is: " + result)
if (result._2.size > 0) pit.setIndexFlagIfLesser(pos)
} else {
- result = (reuse(prev.map(_._2), cbf()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbf())))
+ result = (reuse(prev.map(_._2), cbfBefore()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbfAfter())))
}
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbf, p)
+ val pits = pit.splitWithSignalling
+ for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbfBefore, cbfAfter, p)
}
override def merge(that: Span[U, This]) = result = if (result._2.size == 0) {
(result._1 combine that.result._1, that.result._2)
@@ -1204,15 +1223,15 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Zip[U >: T, S, That](pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
+ protected[this] class Zip[U >: T, S, That](pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
- def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf(self.repr))
+ def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf())
protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
- val opits = othpit.psplit(sizes: _*)
+ val opits = othpit.psplitWithSignalling(sizes: _*)
(pits zip opits) map { p => new Zip(pbf, p._1, p._2) }
}
override def merge(that: Zip[U, S, That]) = result = result combine that.result
@@ -1220,18 +1239,18 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class ZipAll[U >: T, S, That]
- (len: Int, thiselem: U, thatelem: S, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
+ (len: Int, thiselem: U, thatelem: S, pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] {
@volatile var result: Result = null
- def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf(self.repr))
+ def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf())
protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = if (pit.remaining <= len) {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
- val opits = othpit.psplit(sizes: _*)
+ val opits = othpit.psplitWithSignalling(sizes: _*)
((pits zip opits) zip sizes) map { t => new ZipAll(t._2, thiselem, thatelem, pbf, t._1._1, t._1._2) }
} else {
- val opits = othpit.psplit(pit.remaining)
+ val opits = othpit.psplitWithSignalling(pit.remaining)
val diff = len - pit.remaining
Seq(
new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed
@@ -1248,7 +1267,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
val plen = p.remaining min (len - untilp)
new CopyToArray[U, This](from + untilp, plen, array, p)
@@ -1257,7 +1276,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class ToParCollection[U >: T, That](cbf: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
+ protected[this] class ToParCollection[U >: T, That](cbf: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], ToParCollection[U, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[U, That]]) {
@@ -1268,7 +1287,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: ToParCollection[U, That]) = result = result combine that.result
}
- protected[this] class ToParMap[K, V, That](cbf: () => Combiner[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V))
+ protected[this] class ToParMap[K, V, That](cbf: CombinerFactory[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V))
extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[(K, V), That]]) {
@@ -1305,7 +1324,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else trees(from)
protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield {
new CreateScanTree(untilp, p.remaining, z, op, p)
}
@@ -1317,11 +1336,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class FromScanTree[U >: T, That]
- (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CanCombineFrom[Repr, U, That])
+ (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That])
extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) {
- val cb = reuse(prev, cbf(self.repr))
+ val cb = reuse(prev, cbf())
iterate(tree, cb)
result = cb
}
@@ -1351,7 +1370,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
/* scan tree */
- protected[this] def scanBlockSize = (threshold(size, parallelismLevel) / 2) max 1
+ protected[this] def scanBlockSize = (thresholdFromSize(size, tasksupport.parallelismLevel) / 2) max 1
protected[this] trait ScanTree[U >: T] {
def beginsAt: Int
@@ -1392,6 +1411,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
def print(depth: Int) = println((" " * depth) + this)
}
+ /* alias methods */
+
+ def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op);
+
+ def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op);
+
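The aliases simply forward to the fold methods; for example (ParVector assumed):

    val xs = scala.collection.parallel.immutable.ParVector(1, 2, 3)
    (0 /: xs)(_ + _)  // == xs.foldLeft(0)(_ + _) == 6
    (xs :\ 0)(_ + _)  // == xs.foldRight(0)(_ + _) == 6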
/* debug information */
private[parallel] def debugInformation = "Parallel collection: " + this.getClass
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 1d7659922c..536139c812 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -47,7 +47,6 @@ extends GenIterableView[T, Coll]
with ParIterableLike[T, This, ThisSeq]
{
self =>
- import tasksupport._
override def foreach[U](f: T => U): Unit = super[ParIterableLike].foreach(f)
override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner");
@@ -135,7 +134,7 @@ self =>
newZippedAllTryParSeq(that, thisElem, thatElem).asInstanceOf[That]
override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
- executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, ResultMapping[_, Force[U, That], That]]])
+ tasksupport.executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, ResultMapping[_, Force[U, That], That]]])
} otherwise {
val b = bf(underlying)
b ++= this.iterator
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index beb50a41e1..afd1f30903 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -66,7 +66,6 @@ self =>
new IterableSplitter[K] {
i =>
val iter = s
- var signalDelegate: Signalling = IdleSignalling
def hasNext = iter.hasNext
def next() = iter.next._1
def split = {
@@ -84,7 +83,6 @@ self =>
new IterableSplitter[V] {
i =>
val iter = s
- var signalDelegate: Signalling = IdleSignalling
def hasNext = iter.hasNext
def next() = iter.next._2
def split = {
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index d0f38b30dc..9f28a286ca 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -44,39 +44,9 @@ trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, S
extends scala.collection.GenSeqLike[T, Repr]
with ParIterableLike[T, Repr, Sequential] {
self =>
- import tasksupport._
-
+
type SuperParIterator = IterableSplitter[T]
- /** An iterator that can be split into arbitrary subsets of iterators.
- * The self-type requirement ensures that the signal context passing behaviour gets mixed in
- * the concrete iterator instance in some concrete collection.
- *
- * '''Note:''' In concrete collection classes, collection implementers might want to override the iterator
- * `reverse2builder` method to ensure higher efficiency.
- */
- trait ParIterator extends SeqSplitter[T] with super.ParIterator {
- me: SignalContextPassingIterator[ParIterator] =>
- def split: Seq[ParIterator]
- def psplit(sizes: Int*): Seq[ParIterator]
- }
-
- /** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParIterator` ensures that this trait gets mixed into
- * concrete iterators.
- */
- trait SignalContextPassingIterator[+IterRepr <: ParIterator]
- extends ParIterator with super.SignalContextPassingIterator[IterRepr] {
- // Note: See explanation in `ParallelIterableLike.this.SignalContextPassingIterator`
- // to understand why we do the cast here, and have a type parameter.
- // Bottomline: avoiding boilerplate and fighting against inability to override stackable modifications.
- abstract override def psplit(sizes: Int*): Seq[IterRepr] = {
- val pits = super.psplit(sizes: _*)
- pits foreach { _.signalDelegate = signalDelegate }
- pits.asInstanceOf[Seq[IterRepr]]
- }
- }
-
/** A more refined version of the iterator found in the `ParallelIterable` trait,
* this iterator can be split into arbitrary subsets of iterators.
*
@@ -89,9 +59,7 @@ self =>
override def size = length
/** Used to iterate elements using indices */
- protected abstract class Elements(start: Int, val end: Int) extends ParIterator with BufferedIterator[T] {
- me: SignalContextPassingIterator[ParIterator] =>
-
+ protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] {
private var i = start
def hasNext = i < end
@@ -106,14 +74,14 @@ self =>
final def remaining = end - i
- def dup = new Elements(i, end) with SignalContextPassingIterator[ParIterator]
+ def dup = new Elements(i, end) {}
def split = psplit(remaining / 2, remaining - remaining / 2)
def psplit(sizes: Int*) = {
val incr = sizes.scanLeft(0)(_ + _)
for ((from, until) <- incr.init zip incr.tail) yield {
- new Elements(start + from, (start + until) min end) with SignalContextPassingIterator[ParIterator]
+ new Elements(start + from, (start + until) min end) {}
}
}
@@ -138,7 +106,7 @@ self =>
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new SegmentLength(p, 0, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))._1
+ tasksupport.executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1
}
/** Finds the first element satisfying some predicate.
@@ -156,7 +124,7 @@ self =>
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))
+ tasksupport.executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))
}
/** Finds the last element satisfying some predicate.
@@ -174,18 +142,20 @@ self =>
val until = if (end >= length) length else end + 1
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MinValue)
- executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplit(until, length - until)(0) assign ctx))
+ tasksupport.executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplitWithSignalling(until, length - until)(0) assign ctx))
}
def reverse: Repr = {
- executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.resultWithTaskSupport })
}
def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
- executeAndWaitResult(new ReverseMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.result })
- } else seq.reverseMap(f)(bf2seq(bf))
+ tasksupport.executeAndWaitResult(
+ new ReverseMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.resultWithTaskSupport }
+ )
+ } else setTaskSupport(seq.reverseMap(f)(bf2seq(bf)), tasksupport)
/*bf ifParallel { pbf =>
- executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result })
} otherwise seq.reverseMap(f)(bf2seq(bf))*/
/** Tests whether this $coll contains the given sequence at a given index.
@@ -203,13 +173,15 @@ self =>
else if (pthat.length > length - offset) false
else {
val ctx = new DefaultSignalling with VolatileAbort
- executeAndWaitResult(new SameElements(splitter.psplit(offset, pthat.length)(1) assign ctx, pthat.splitter))
+ tasksupport.executeAndWaitResult(
+ new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter)
+ )
}
} otherwise seq.startsWith(that, offset)
override def sameElements[U >: T](that: GenIterable[U]): Boolean = that ifParSeq { pthat =>
val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter))
+ length == pthat.length && tasksupport.executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter))
} otherwise seq.sameElements(that)
/** Tests whether this $coll ends with the given parallel sequence.
@@ -226,24 +198,24 @@ self =>
else {
val ctx = new DefaultSignalling with VolatileAbort
val tlen = that.length
- executeAndWaitResult(new SameElements(splitter.psplit(length - tlen, tlen)(1) assign ctx, pthat.splitter))
+ tasksupport.executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter))
}
} otherwise seq.endsWith(that)
def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
val realreplaced = replaced min (length - from)
- if (patch.isParSeq && bf.isParallel && (size - realreplaced + patch.size) > MIN_FOR_COPY) {
+ if (patch.isParSeq && bf(repr).isCombiner && (size - realreplaced + patch.size) > MIN_FOR_COPY) {
val that = patch.asParSeq
- val pbf = bf.asParallel
- val pits = splitter.psplit(from, replaced, length - from - realreplaced)
- val copystart = new Copy[U, That](() => pbf(repr), pits(0))
+ val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced)
+ val cfactory = combinerFactory(() => bf(repr).asCombiner)
+ val copystart = new Copy[U, That](cfactory, pits(0))
val copymiddle = wrap {
- val tsk = new that.Copy[U, That](() => pbf(repr), that.splitter)
+ val tsk = new that.Copy[U, That](cfactory, that.splitter)
tasksupport.executeAndWaitResult(tsk)
}
- val copyend = new Copy[U, That](() => pbf(repr), pits(2))
- executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult {
- _.result
+ val copyend = new Copy[U, That](cfactory, pits(2))
+ tasksupport.executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult {
+ _.resultWithTaskSupport
})
} else patch_sequential(from, patch.seq, replaced)
}
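A usage sketch of the parallel patch path (both sides parallel and a result large enough to exceed MIN_FOR_COPY are assumed; the semantics are the same either way):

    import scala.collection.parallel.immutable.ParVector
    ParVector(1, 2, 3, 4, 5).patch(1, ParVector(9, 9), 2)
    // == ParVector(1, 9, 9, 4, 5): two elements starting at index 1 are
    // replaced, via the three Copy tasks composed with `parallel` above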
@@ -252,18 +224,22 @@ self =>
val from = 0 max fromarg
val b = bf(repr)
val repl = (r min (length - from)) max 0
- val pits = splitter.psplit(from, repl, length - from - repl)
+ val pits = splitter.psplitWithSignalling(from, repl, length - from - repl)
b ++= pits(0)
b ++= patch
b ++= pits(2)
- b.result
+ setTaskSupport(b.result, tasksupport)
}
def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
- executeAndWaitResult(new Updated(index, elem, () => bf(repr).asCombiner, splitter) mapResult { _.result })
- } else seq.updated(index, elem)(bf2seq(bf))
+ tasksupport.executeAndWaitResult(
+ new Updated(index, elem, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult {
+ _.resultWithTaskSupport
+ }
+ )
+ } else setTaskSupport(seq.updated(index, elem)(bf2seq(bf)), tasksupport)
/*bf ifParallel { pbf =>
- executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result })
} otherwise seq.updated(index, elem)(bf2seq(bf))*/
def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
@@ -278,10 +254,13 @@ self =>
patch(length, new immutable.Repetition(elem, len - length), 0)
} else patch(length, Nil, 0);
- override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
- val pbf = bf.asParallel
+ override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
- executeAndWaitResult(new Zip(length min thatseq.length, pbf, splitter, thatseq.splitter) mapResult { _.result });
+ tasksupport.executeAndWaitResult(
+ new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
+ _.resultWithTaskSupport
+ }
+ );
} else super.zip(that)(bf)
/** Tests whether every element of this $coll relates to the
@@ -298,7 +277,7 @@ self =>
*/
def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat =>
val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter))
+ length == pthat.length && tasksupport.executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter))
} otherwise seq.corresponds(that)(p)
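A usage sketch for corresponds (the VolatileAbort context lets the first mismatch abort the remaining subtasks):

    import scala.collection.parallel.immutable.ParVector
    val a = ParVector(1, 2, 3)
    val b = ParVector(2, 4, 6)
    a.corresponds(b)(_ * 2 == _) // true; a mismatch would signal abort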
def diff[U >: T](that: GenSeq[U]): Repr = sequentially {
@@ -372,7 +351,7 @@ self =>
} else result = (0, false)
protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p)
}
override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2)
@@ -391,7 +370,7 @@ self =>
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p)
}
override def merge(that: IndexWhere) = result = if (result == -1) that.result else {
@@ -412,7 +391,7 @@ self =>
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p)
}
override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else {
@@ -437,7 +416,7 @@ self =>
override def merge(that: ReverseMap[S, That]) = result = that.result combine result
}
- protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[U])
+ protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[U])
extends Accessor[Boolean, SameElements[U]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
@@ -448,44 +427,44 @@ self =>
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
- for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new SameElements(p, op)
+ for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new SameElements(p, op)
}
override def merge(that: SameElements[U]) = result = result && that.result
override def requiresStrictSplitters = true
}
- protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: () => Combiner[U, That], protected[this] val pit: SeqSplitter[T])
+ protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CombinerFactory[U, That], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[U, That], Updated[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf())
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p)
}
override def merge(that: Updated[U, That]) = result = result combine that.result
override def requiresStrictSplitters = true
}
- protected[this] class Zip[U >: T, S, That](len: Int, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S])
+ protected[this] class Zip[U >: T, S, That](len: Int, cf: CombinerFactory[(U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
- def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, pbf(self.repr))
+ def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, cf())
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
val fp = len / 2
val sp = len - len / 2
- val pits = pit.psplit(fp, sp)
- val opits = otherpit.psplit(fp, sp)
+ val pits = pit.psplitWithSignalling(fp, sp)
+ val opits = otherpit.psplitWithSignalling(fp, sp)
Seq(
- new Zip(fp, pbf, pits(0), opits(0)),
- new Zip(sp, pbf, pits(1), opits(1))
+ new Zip(fp, cf, pits(0), opits(0)),
+ new Zip(sp, cf, pits(1), opits(1))
)
}
override def merge(that: Zip[U, S, That]) = result = result combine that.result
}
- protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[S])
+ protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S])
extends Accessor[Boolean, Corresponds[S]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
@@ -496,7 +475,7 @@ self =>
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
- for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new Corresponds(corr, p, op)
+ for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new Corresponds(corr, p, op)
}
override def merge(that: Corresponds[S]) = result = result && that.result
override def requiresStrictSplitters = true
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
index 6fdc181793..e0d1a7d6ff 100644
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala
@@ -38,7 +38,6 @@ extends GenSeqView[T, Coll]
with ParSeqLike[T, This, ThisSeq]
{
self =>
- import tasksupport._
trait Transformed[+S] extends ParSeqView[S, Coll, CollSeq]
with super[ParIterableView].Transformed[S] with super[GenSeqViewLike].Transformed[S] {
@@ -170,7 +169,7 @@ self =>
override def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanRight(z)(op)).asInstanceOf[That]
override def groupBy[K](f: T => K): immutable.ParMap[K, This] = thisParSeq.groupBy(f).map(kv => (kv._1, newForced(kv._2).asInstanceOf[This]))
override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
- executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, _]])
+ tasksupport.executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, _]])
} otherwise {
val b = bf(underlying)
b ++= this.iterator
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index e04e0e9c72..c5910ff2c8 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -14,6 +14,7 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.generic.Signalling
import scala.collection.generic.DelegatedSignalling
+import scala.collection.generic.IdleSignalling
import scala.collection.generic.CanCombineFrom
import scala.collection.mutable.Builder
import scala.collection.Iterator.empty
@@ -27,6 +28,11 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] {
* This method doesn't change the state of the iterator.
*/
def remaining: Int
+
+ /** For most collections, this is a cheap operation.
+ * Exceptions can override this method.
+ */
+ def isRemainingCheap = true
}
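A self-contained sketch of the kind of iterator this hook is for (MiniRemains is a made-up stand-in for RemainsIterator):

    trait MiniRemains[+T] extends Iterator[T] {
      def remaining: Int
      def isRemainingCheap = true
    }
    class StreamRemains[T](private var rest: Stream[T]) extends MiniRemains[T] {
      def hasNext = rest.nonEmpty
      def next() = { val h = rest.head; rest = rest.tail; h }
      def remaining = rest.length            // may force the whole tail: O(n)
      override def isRemainingCheap = false  // callers then skip sizeHint(remaining)
    }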
@@ -111,7 +117,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
while (hasNext) cb += f(next)
cb
}
@@ -136,7 +142,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](b: Bld): Bld = {
- b.sizeHint(remaining)
+ if (isRemainingCheap) b.sizeHint(remaining)
while (hasNext) b += next
b
}
@@ -178,7 +184,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]): Combiner[U, This] = {
drop(n)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
while (hasNext) cb += next
cb
}
@@ -196,7 +202,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def splitAt2combiners[U >: T, This](at: Int, before: Combiner[U, This], after: Combiner[U, This]) = {
before.sizeHint(at)
- after.sizeHint(remaining - at)
+ if (isRemainingCheap) after.sizeHint(remaining - at)
var left = at
while (left > 0) {
before += next
@@ -222,7 +228,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
val curr = next
if (p(curr)) before += curr
else {
- after.sizeHint(remaining + 1)
+ if (isRemainingCheap) after.sizeHint(remaining + 1)
after += curr
isBefore = false
}
@@ -262,7 +268,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
- cb.sizeHint(remaining min otherpit.remaining)
+ if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining)
while (hasNext && otherpit.hasNext) {
cb += ((next, otherpit.next))
}
@@ -270,7 +276,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
- cb.sizeHint(remaining max that.remaining)
+ if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining)
while (this.hasNext && that.hasNext) cb += ((this.next, that.next))
while (this.hasNext) cb += ((this.next, thatelem))
while (that.hasNext) cb += ((thiselem, that.next))
@@ -329,7 +335,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
/* transformers */
def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = {
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
var lst = List[T]()
while (hasNext) lst ::= next
while (lst != Nil) {
@@ -341,7 +347,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
def reverseMap2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = cbf(repr)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
var lst = List[S]()
while (hasNext) lst ::= f(next)
while (lst != Nil) {
@@ -353,7 +359,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
def updated2combiner[U >: T, That](index: Int, elem: U, cb: Combiner[U, That]): Combiner[U, That] = {
//val cb = cbf(repr)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
var j = 0
while (hasNext) {
if (j == index) {
@@ -381,11 +387,21 @@ extends AugmentedIterableIterator[T]
{
self =>
+ var signalDelegate: Signalling = IdleSignalling
+
/** Creates a copy of this iterator. */
def dup: IterableSplitter[T]
def split: Seq[IterableSplitter[T]]
+ def splitWithSignalling: Seq[IterableSplitter[T]] = {
+ val pits = split
+ pits foreach { _.signalDelegate = signalDelegate }
+ pits
+ }
+
+ def shouldSplitFurther[S](coll: ParIterable[S], parallelismLevel: Int) = remaining > thresholdFromSize(coll.size, parallelismLevel)
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator.
*
@@ -421,7 +437,6 @@ self =>
/* iterator transformers */
class Taken(taken: Int) extends IterableSplitter[T] {
- var signalDelegate = self.signalDelegate
var remaining = taken min self.remaining
def hasNext = remaining > 0
def next = { remaining -= 1; self.next }
@@ -450,7 +465,7 @@ self =>
override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1)
class Mapped[S](f: T => S) extends IterableSplitter[S] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
def hasNext = self.hasNext
def next = f(self.next)
def remaining = self.remaining
@@ -461,7 +476,7 @@ self =>
override def map[S](f: T => S) = new Mapped(f)
class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
protected var curr: IterableSplitter[U] = self
def hasNext = if (curr.hasNext) true else if (curr eq self) {
curr = that
@@ -480,7 +495,7 @@ self =>
def appendParIterable[U >: T, PI <: IterableSplitter[U]](that: PI) = new Appended[U, PI](that)
class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
def hasNext = self.hasNext && that.hasNext
def next = (self.next, that.next)
def remaining = self.remaining min that.remaining
@@ -497,7 +512,7 @@ self =>
class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S)
extends IterableSplitter[(U, S)] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
def hasNext = self.hasNext || that.hasNext
def next = if (self.hasNext) {
if (that.hasNext) (self.next, that.next)
@@ -534,6 +549,18 @@ self =>
def split: Seq[SeqSplitter[T]]
def psplit(sizes: Int*): Seq[SeqSplitter[T]]
+ override def splitWithSignalling: Seq[SeqSplitter[T]] = {
+ val pits = split
+ pits foreach { _.signalDelegate = signalDelegate }
+ pits
+ }
+
+ def psplitWithSignalling(sizes: Int*): Seq[SeqSplitter[T]] = {
+ val pits = psplit(sizes: _*)
+ pits foreach { _.signalDelegate = signalDelegate }
+ pits
+ }
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator. Unlike the version of this method in the supertrait,
* method `remaining` in `ParSeqLike.this.ParIterator` must return an exact number
@@ -626,13 +653,13 @@ self =>
def reverse: SeqSplitter[T] = {
val pa = mutable.ParArray.fromTraversables(self).reverse
- new pa.ParArrayIterator with pa.SCPI {
+ new pa.ParArrayIterator {
override def reverse = self
}
}
class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
private[this] val trio = {
val pits = self.psplit(from, replaced, self.remaining - from - replaced)
(pits(0).appendParSeq[U, SeqSplitter[U]](patch)) appendParSeq pits(2)
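The hunks above consistently guard `sizeHint` calls with `isRemainingCheap`: for most splitters `remaining` is an O(1) field, but for splitters such as the concurrent-trie one introduced later in this patch it forces a lazily computed total size. A minimal sketch of the idiom, with a hypothetical `copyToCombiner` standing in for the transformer methods above:

  import scala.collection.parallel.Combiner

  // Only pre-size the combiner when `remaining` is cheap for this splitter;
  // otherwise just append and let the combiner grow on demand.
  def copyToCombiner[U, That](it: Iterator[U], remaining: => Int,
                              isRemainingCheap: Boolean,
                              cb: Combiner[U, That]): Combiner[U, That] = {
    if (isRemainingCheap) cb.sizeHint(remaining)
    while (it.hasNext) cb += it.next()
    cb
  }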
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 20800250b4..59b75f523f 100644
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
@@ -11,15 +11,46 @@ package scala.collection.parallel
+import java.util.concurrent.ThreadPoolExecutor
+import scala.concurrent.forkjoin.ForkJoinPool
+import scala.concurrent.ExecutionContext
-
+/** A trait implementing the scheduling of
+ * a parallel collection operation.
+ *
+ * Task support objects handle how a task is split and
+ * distributed across processors. A task support object can be
+ * changed in a parallel collection after it has been created,
+ * but only during a quiescent period, i.e. while there are no
+ * concurrent invocations to parallel collection methods.
+ */
trait TaskSupport extends Tasks
-private[collection] class ForkJoinTaskSupport extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
-private[collection] class ThreadPoolTaskSupport extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
+/** A task support that uses a fork/join pool to schedule tasks. */
+class ForkJoinTaskSupport(val environment: ForkJoinPool = ForkJoinTasks.defaultForkJoinPool)
+extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
+
+
+/** A task support that uses a thread pool executor to schedule tasks. */
+class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool)
+extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
+
+
+/** A task support that uses an execution context to schedule tasks.
+ *
+ * It can be used with the default execution context implementation in the `scala.concurrent` package.
+ * It internally forwards calls to either a fork/join-based task support or a thread-pool-based one,
+ * depending on which backend the execution context uses.
+ *
+ * By default, parallel collections are parametrized with this task support object, so they
+ * share the same execution context backend as the rest of the `scala.concurrent` package.
+ */
+class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.executionContext)
+extends TaskSupport with ExecutionContextTasks
+
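Since task support objects are now public, the scheduling backend of a single collection can be swapped without touching global state. A usage sketch under this commit's API (the pool size is an arbitrary example):

  import scala.collection.parallel.ForkJoinTaskSupport
  import scala.concurrent.forkjoin.ForkJoinPool

  val pc = (1 to 10000).par
  // must happen during a quiescent period, per the doc comment above
  pc.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
  val result = (pc map (_ * 2)).sum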
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index b705909cad..60a8bb1ed6 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -6,109 +6,109 @@
** |/ **
\* */
-
package scala.collection.parallel
+import java.util.concurrent.ThreadPoolExecutor
import scala.concurrent.forkjoin._
+import scala.concurrent.ExecutionContext
import scala.util.control.Breaks._
-
import annotation.unchecked.uncheckedVariance
+trait Task[R, +Tp] {
+ type Result = R
-/** A trait that declares task execution capabilities used
- * by parallel collections.
- */
-trait Tasks {
-
- private[parallel] val debugMessages = collection.mutable.ArrayBuffer[String]()
-
- private[parallel] def debuglog(s: String) = synchronized {
- debugMessages += s
- }
-
- trait Task[R, +Tp] {
- type Result = R
+ def repr = this.asInstanceOf[Tp]
- def repr = this.asInstanceOf[Tp]
-
- /** Body of the task - non-divisible unit of work done by this task.
- * Optionally is provided with the result from the previous completed task
- * or `None` if there was no previous task (or the previous task is uncompleted or unknown).
- */
- def leaf(result: Option[R])
+ /** Body of the task - non-divisible unit of work done by this task.
+ * It is optionally provided with the result of the previously completed task,
+ * or `None` if there was no previous task (or the previous task is incomplete or unknown).
+ */
+ def leaf(result: Option[R])
- /** A result that can be accessed once the task is completed. */
- var result: R
+ /** A result that can be accessed once the task is completed. */
+ var result: R
- /** Decides whether or not this task should be split further. */
- def shouldSplitFurther: Boolean
+ /** Decides whether or not this task should be split further. */
+ def shouldSplitFurther: Boolean
- /** Splits this task into a list of smaller tasks. */
- private[parallel] def split: Seq[Task[R, Tp]]
+ /** Splits this task into a list of smaller tasks. */
+ private[parallel] def split: Seq[Task[R, Tp]]
- /** Read of results of `that` task and merge them into results of this one. */
- private[parallel] def merge(that: Tp @uncheckedVariance) {}
+ /** Reads the results of `that` task and merges them into the results of this one. */
+ private[parallel] def merge(that: Tp @uncheckedVariance) {}
- // exception handling mechanism
- @volatile var throwable: Throwable = null
- def forwardThrowable() = if (throwable != null) throw throwable
+ // exception handling mechanism
+ @volatile var throwable: Throwable = null
+ def forwardThrowable() = if (throwable != null) throw throwable
- // tries to do the leaf computation, storing the possible exception
- private[parallel] def tryLeaf(lastres: Option[R]) {
- try {
- tryBreakable {
- leaf(lastres)
- result = result // ensure that effects of `leaf` are visible to readers of `result`
- } catchBreak {
- signalAbort
- }
- } catch {
- case thr: Exception =>
- result = result // ensure that effects of `leaf` are visible
- throwable = thr
- signalAbort
+ // tries to do the leaf computation, storing the possible exception
+ private[parallel] def tryLeaf(lastres: Option[R]) {
+ try {
+ tryBreakable {
+ leaf(lastres)
+ result = result // ensure that effects of `leaf` are visible to readers of `result`
+ } catchBreak {
+ signalAbort
}
+ } catch {
+ case thr: Exception =>
+ result = result // ensure that effects of `leaf` are visible
+ throwable = thr
+ signalAbort
}
+ }
- private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
- val that = t.asInstanceOf[Task[R, Tp]]
- val local = result // ensure that any effects of modifying `result` are detected
- // checkMerge(that)
- if (this.throwable == null && that.throwable == null) merge(t)
- mergeThrowables(that)
- }
+ private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
+ val that = t.asInstanceOf[Task[R, Tp]]
+ val local = result // ensure that any effects of modifying `result` are detected
+ // checkMerge(that)
+ if (this.throwable == null && that.throwable == null) merge(t)
+ mergeThrowables(that)
+ }
- private def checkMerge(that: Task[R, Tp] @uncheckedVariance) {
- if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) {
- println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable)
- } else if (this.throwable != null || that.throwable != null) {
- println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
- }
+ private def checkMerge(that: Task[R, Tp] @uncheckedVariance) {
+ if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) {
+ println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable)
+ } else if (this.throwable != null || that.throwable != null) {
+ println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
}
+ }
- private[parallel] def mergeThrowables(that: Task[_, _]) {
- if (this.throwable != null && that.throwable != null) {
- // merge exceptions, since there were multiple exceptions
- this.throwable = this.throwable alongWith that.throwable
- } else if (that.throwable != null) this.throwable = that.throwable
+ private[parallel] def mergeThrowables(that: Task[_, _]) {
+ if (this.throwable != null && that.throwable != null) {
+ // multiple exceptions were thrown, so merge them
+ this.throwable = this.throwable alongWith that.throwable
+ } else if (that.throwable != null) this.throwable = that.throwable
else this.throwable = this.throwable
- }
+ }
+
+ // override in concrete task implementations to signal abort to other tasks
+ private[parallel] def signalAbort() {}
+}
+
+
+/** A trait that declares task execution capabilities used
+ * by parallel collections.
+ */
+trait Tasks {
- // override in concrete task implementations to signal abort to other tasks
- private[parallel] def signalAbort() {}
+ private[parallel] val debugMessages = collection.mutable.ArrayBuffer[String]()
+
+ private[parallel] def debuglog(s: String) = synchronized {
+ debugMessages += s
}
- trait TaskImpl[R, +Tp] {
+ trait WrappedTask[R, +Tp] {
/** the body of this task - what it executes, how it gets split and how results are merged. */
val body: Task[R, Tp]
- def split: Seq[TaskImpl[R, Tp]]
+ def split: Seq[WrappedTask[R, Tp]]
/** Code that gets called after the task gets started - it may spawn other tasks instead of calling `leaf`. */
def compute()
/** Start task. */
@@ -129,13 +129,10 @@ trait Tasks {
def release() {}
}
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
-
/* task control */
- // safe to assume it will always have the same type,
- // because the `tasksupport` in parallel iterable is final
- var environment: AnyRef
+ /** The type of the environment is more specific in the implementations. */
+ val environment: AnyRef
/** Executes a task and returns a future. Forwards an exception if some task threw it. */
def execute[R, Tp](fjtask: Task[R, Tp]): () => R
@@ -155,11 +152,11 @@ trait Tasks {
*/
trait AdaptiveWorkStealingTasks extends Tasks {
- trait TaskImpl[R, Tp] extends super.TaskImpl[R, Tp] {
- @volatile var next: TaskImpl[R, Tp] = null
+ trait WrappedTask[R, Tp] extends super.WrappedTask[R, Tp] {
+ @volatile var next: WrappedTask[R, Tp] = null
@volatile var shouldWaitFor = true
- def split: Seq[TaskImpl[R, Tp]]
+ def split: Seq[WrappedTask[R, Tp]]
def compute() = if (body.shouldSplitFurther) {
internal()
@@ -171,12 +168,12 @@ trait AdaptiveWorkStealingTasks extends Tasks {
def internal() = {
var last = spawnSubtasks()
-
+
last.body.tryLeaf(None)
last.release()
body.result = last.body.result
body.throwable = last.body.throwable
-
+
while (last.next != null) {
// val lastresult = Option(last.body.result)
val beforelast = last
@@ -193,10 +190,10 @@ trait AdaptiveWorkStealingTasks extends Tasks {
body.tryMerge(last.body.repr)
}
}
-
+
def spawnSubtasks() = {
- var last: TaskImpl[R, Tp] = null
- var head: TaskImpl[R, Tp] = this
+ var last: WrappedTask[R, Tp] = null
+ var head: WrappedTask[R, Tp] = this
do {
val subtasks = head.split
head = subtasks.head
@@ -222,7 +219,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
}
// specialize ctor
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
+ protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
}
@@ -231,13 +228,13 @@ trait AdaptiveWorkStealingTasks extends Tasks {
trait ThreadPoolTasks extends Tasks {
import java.util.concurrent._
- trait TaskImpl[R, +Tp] extends Runnable with super.TaskImpl[R, Tp] {
+ trait WrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] {
// initially, this is null
// once the task is started, this future is set and used for `sync`
// utb: var future: Future[_] = null
@volatile var owned = false
@volatile var completed = false
-
+
def start() = synchronized {
// debuglog("Starting " + body)
// utb: future = executor.submit(this)
@@ -293,9 +290,9 @@ trait ThreadPoolTasks extends Tasks {
}
}
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
+ protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
- var environment: AnyRef = ThreadPoolTasks.defaultThreadPool
+ val environment: ThreadPoolExecutor
def executor = environment.asInstanceOf[ThreadPoolExecutor]
def queue = executor.getQueue.asInstanceOf[LinkedBlockingQueue[Runnable]]
@volatile var totaltasks = 0
@@ -309,7 +306,7 @@ trait ThreadPoolTasks extends Tasks {
}
def execute[R, Tp](task: Task[R, Tp]): () => R = {
- val t = newTaskImpl(task)
+ val t = newWrappedTask(task)
// debuglog("-----------> Executing without wait: " + task)
t.start()
@@ -322,11 +319,11 @@ trait ThreadPoolTasks extends Tasks {
}
def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
- val t = newTaskImpl(task)
+ val t = newWrappedTask(task)
// debuglog("-----------> Executing with wait: " + task)
t.start()
-
+
t.sync()
t.body.forwardThrowable
t.body.result
@@ -362,10 +359,11 @@ object ThreadPoolTasks {
/** An implementation of tasks objects based on the Java thread pooling API and synchronization using futures. */
+@deprecated("This implementation is not used.", "2.10.0")
trait FutureThreadPoolTasks extends Tasks {
import java.util.concurrent._
- trait TaskImpl[R, +Tp] extends Runnable with super.TaskImpl[R, Tp] {
+ trait WrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] {
@volatile var future: Future[_] = null
def start() = {
@@ -380,13 +378,13 @@ trait FutureThreadPoolTasks extends Tasks {
}
}
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
+ protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
- var environment: AnyRef = FutureThreadPoolTasks.defaultThreadPool
+ val environment: AnyRef = FutureThreadPoolTasks.defaultThreadPool
def executor = environment.asInstanceOf[ThreadPoolExecutor]
def execute[R, Tp](task: Task[R, Tp]): () => R = {
- val t = newTaskImpl(task)
+ val t = newWrappedTask(task)
// debuglog("-----------> Executing without wait: " + task)
t.start
@@ -399,7 +397,7 @@ trait FutureThreadPoolTasks extends Tasks {
}
def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
- val t = newTaskImpl(task)
+ val t = newWrappedTask(task)
// debuglog("-----------> Executing with wait: " + task)
t.start
@@ -441,26 +439,26 @@ trait HavingForkJoinPool {
*/
trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
- trait TaskImpl[R, +Tp] extends RecursiveAction with super.TaskImpl[R, Tp] {
+ trait WrappedTask[R, +Tp] extends RecursiveAction with super.WrappedTask[R, Tp] {
def start() = fork
def sync() = join
def tryCancel = tryUnfork
}
// specialize ctor
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
+ protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
/** The fork/join pool of this collection.
*/
def forkJoinPool: ForkJoinPool = environment.asInstanceOf[ForkJoinPool]
- var environment: AnyRef = ForkJoinTasks.defaultForkJoinPool
+ val environment: ForkJoinPool
/** Executes a task and does not wait for it to finish - instead returns a future.
*
* $fjdispatch
*/
def execute[R, Tp](task: Task[R, Tp]): () => R = {
- val fjtask = newTaskImpl(task)
+ val fjtask = newWrappedTask(task)
if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread]) {
fjtask.fork
@@ -483,7 +481,7 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
* @return the result of the task
*/
def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
- val fjtask = newTaskImpl(task)
+ val fjtask = newWrappedTask(task)
if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread]) {
fjtask.fork
@@ -513,25 +511,50 @@ object ForkJoinTasks {
*/
trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkStealingTasks {
- class TaskImpl[R, Tp](val body: Task[R, Tp])
- extends super[ForkJoinTasks].TaskImpl[R, Tp] with super[AdaptiveWorkStealingTasks].TaskImpl[R, Tp] {
- def split = body.split.map(b => newTaskImpl(b))
+ class WrappedTask[R, Tp](val body: Task[R, Tp])
+ extends super[ForkJoinTasks].WrappedTask[R, Tp] with super[AdaptiveWorkStealingTasks].WrappedTask[R, Tp] {
+ def split = body.split.map(b => newWrappedTask(b))
}
- def newTaskImpl[R, Tp](b: Task[R, Tp]) = new TaskImpl[R, Tp](b)
+ def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
}
trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveWorkStealingTasks {
- class TaskImpl[R, Tp](val body: Task[R, Tp])
- extends super[ThreadPoolTasks].TaskImpl[R, Tp] with super[AdaptiveWorkStealingTasks].TaskImpl[R, Tp] {
- def split = body.split.map(b => newTaskImpl(b))
+ class WrappedTask[R, Tp](val body: Task[R, Tp])
+ extends super[ThreadPoolTasks].WrappedTask[R, Tp] with super[AdaptiveWorkStealingTasks].WrappedTask[R, Tp] {
+ def split = body.split.map(b => newWrappedTask(b))
}
- def newTaskImpl[R, Tp](b: Task[R, Tp]) = new TaskImpl[R, Tp](b)
+ def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
+
+}
+
+trait ExecutionContextTasks extends Tasks {
+
+ def executionContext = environment
+
+ val environment: ExecutionContext
+
+ // this part is a hack that allows switching to the appropriate backend
+ val driver: Tasks = executionContext match {
+ case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executorService match {
+ case fjp: ForkJoinPool => new ForkJoinTaskSupport(fjp)
+ case tpe: ThreadPoolExecutor => new ThreadPoolTaskSupport(tpe)
+ case _ => ???
+ }
+ case _ => ???
+ }
+
+ def execute[R, Tp](task: Task[R, Tp]): () => R = driver execute task
+
+ def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = driver executeAndWaitResult task
+
+ def parallelismLevel = driver.parallelismLevel
+
}
@@ -541,3 +564,6 @@ trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveW
+
+
+
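With `Task` hoisted out of the `Tasks` trait, a task is now a standalone object describing how work splits and how results merge. An illustrative task under this commit's API; since `split` and `merge` are `private[parallel]`, the sketch assumes it is compiled inside that package, and the threshold of 512 is arbitrary:

  package scala.collection.parallel

  // Hypothetical example: sum an array slice, halving until slices are small.
  class SumSlice(arr: Array[Int], offset: Int, howmany: Int)
  extends Task[Int, SumSlice] {
    @volatile var result: Int = 0
    def leaf(prev: Option[Int]) = {
      var s = 0
      var i = offset
      while (i < offset + howmany) { s += arr(i); i += 1 }
      result = s
    }
    private[parallel] def split = {
      val fp = howmany / 2
      Seq(new SumSlice(arr, offset, fp), new SumSlice(arr, offset + fp, howmany - fp))
    }
    def shouldSplitFurther = howmany > 512
    override private[parallel] def merge(that: SumSlice) = result += that.result
  }

  // e.g. (new ForkJoinTaskSupport()).executeAndWaitResult(new SumSlice(a, 0, a.length))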
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index e785932933..266b179401 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -8,6 +8,8 @@
package scala.collection.parallel.immutable
+
+
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
import scala.collection.parallel.IterableSplitter
@@ -19,6 +21,9 @@ import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
import scala.collection.immutable.{ HashMap, TrieIterator }
import annotation.unchecked.uncheckedVariance
+import collection.parallel.Task
+
+
/** Immutable parallel hash map, based on hash tries.
*
@@ -52,7 +57,7 @@ self =>
protected[this] override def newCombiner = HashMapCombiner[K, V]
- def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size)
override def seq = trie
@@ -69,11 +74,8 @@ self =>
case None => newc
}
- type SCPI = SignalContextPassingIterator[ParHashMapIterator]
-
class ParHashMapIterator(var triter: Iterator[(K, V @uncheckedVariance)], val sz: Int)
- extends super.ParIterator {
- self: SignalContextPassingIterator[ParHashMapIterator] =>
+ extends IterableSplitter[(K, V)] {
var i = 0
def dup = triter match {
case t: TrieIterator[_] =>
@@ -84,24 +86,24 @@ self =>
dupFromIterator(buff.iterator)
}
private def dupFromIterator(it: Iterator[(K, V @uncheckedVariance)]) = {
- val phit = new ParHashMapIterator(it, sz) with SCPI
+ val phit = new ParHashMapIterator(it, sz)
phit.i = i
phit
}
- def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match {
+ def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match {
case t: TrieIterator[_] =>
val previousRemaining = remaining
val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
Seq(
- new ParHashMapIterator(fst, fstlength) with SCPI,
- new ParHashMapIterator(snd, sndlength) with SCPI
+ new ParHashMapIterator(fst, fstlength),
+ new ParHashMapIterator(snd, sndlength)
)
case _ =>
// iterator of the collision map case
val buff = triter.toBuffer
val (fp, sp) = buff.splitAt(buff.length / 2)
- Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) with SCPI }
+ Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) }
}
def next(): (K, V) = {
i += 1
@@ -156,7 +158,6 @@ private[parallel] abstract class HashMapCombiner[K, V]
extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
import HashMapCombiner._
- import collection.parallel.tasksupport._
val emptyTrie = HashMap.empty[K, V]
def +=(elem: (K, V)) = {
@@ -176,7 +177,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val bucks = buckets.filter(_ != null).map(_.headPtr)
val root = new Array[HashMap[K, V]](bucks.length)
- executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
+ combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
var bitmap = 0
var i = 0
@@ -198,7 +199,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val bucks = buckets.filter(_ != null).map(_.headPtr)
val root = new Array[HashMap[K, AnyRef]](bucks.length)
- executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length))
+ combinerTaskSupport.executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length))
var bitmap = 0
var i = 0
@@ -259,7 +260,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, parallelismLevel)
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int)
@@ -324,7 +325,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val fp = howmany / 2
List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, parallelismLevel)
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
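The combiners above no longer import the global `collection.parallel.tasksupport`; each schedules its internal tasks through its own `combinerTaskSupport`. A conceptual sketch of the pattern (the trait name is hypothetical, and it assumes `combinerTaskSupport` is a member of `Combiner`, as its uses in this patch indicate):

  import scala.collection.parallel.{ Combiner, Task }

  trait ParallelResultBuilding[Elem, To] { self: Combiner[Elem, To] =>
    // run a combiner-internal task on whatever task support this combiner carries
    def runInternally[R, Tp](task: Task[R, Tp]): R =
      combinerTaskSupport.executeAndWaitResult(task)
  }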
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 8332167b90..0d7f04976e 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -8,6 +8,8 @@
package scala.collection.parallel.immutable
+
+
import scala.collection.parallel.ParSetLike
import scala.collection.parallel.Combiner
import scala.collection.parallel.IterableSplitter
@@ -19,6 +21,9 @@ import scala.collection.generic.GenericParTemplate
import scala.collection.generic.GenericParCompanion
import scala.collection.generic.GenericCompanion
import scala.collection.immutable.{ HashSet, TrieIterator }
+import collection.parallel.Task
+
+
/** Immutable parallel hash set, based on hash tries.
*
@@ -49,7 +54,7 @@ self =>
override def empty: ParHashSet[T] = new ParHashSet[T]
- def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size)
override def seq = trie
@@ -66,11 +71,8 @@ self =>
case None => newc
}
- type SCPI = SignalContextPassingIterator[ParHashSetIterator]
-
class ParHashSetIterator(var triter: Iterator[T], val sz: Int)
- extends super.ParIterator {
- self: SignalContextPassingIterator[ParHashSetIterator] =>
+ extends IterableSplitter[T] {
var i = 0
def dup = triter match {
case t: TrieIterator[_] =>
@@ -81,24 +83,24 @@ self =>
dupFromIterator(buff.iterator)
}
private def dupFromIterator(it: Iterator[T]) = {
- val phit = new ParHashSetIterator(it, sz) with SCPI
+ val phit = new ParHashSetIterator(it, sz)
phit.i = i
phit
}
- def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match {
+ def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match {
case t: TrieIterator[_] =>
val previousRemaining = remaining
val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
Seq(
- new ParHashSetIterator(fst, fstlength) with SCPI,
- new ParHashSetIterator(snd, sndlength) with SCPI
+ new ParHashSetIterator(fst, fstlength),
+ new ParHashSetIterator(snd, sndlength)
)
case _ =>
// iterator of the collision map case
val buff = triter.toBuffer
val (fp, sp) = buff.splitAt(buff.length / 2)
- Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) with SCPI }
+ Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) }
}
def next(): T = {
i += 1
@@ -111,6 +113,7 @@ self =>
}
}
+
/** $factoryInfo
* @define Coll immutable.ParHashSet
* @define coll immutable parallel hash set
@@ -124,11 +127,11 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
def fromTrie[T](t: HashSet[T]) = new ParHashSet(t)
}
+
private[immutable] abstract class HashSetCombiner[T]
extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
import HashSetCombiner._
- import collection.parallel.tasksupport._
val emptyTrie = HashSet.empty[T]
def +=(elem: T) = {
@@ -148,7 +151,7 @@ extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombine
val bucks = buckets.filter(_ != null).map(_.headPtr)
val root = new Array[HashSet[T]](bucks.length)
- executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
+ combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
var bitmap = 0
var i = 0
@@ -203,10 +206,11 @@ extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombine
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, parallelismLevel)
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
+
object HashSetCombiner {
def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {}
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 350e64739f..64e07ce4ff 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -10,6 +10,7 @@ package scala.collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
+import scala.collection.parallel.SeqSplitter
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.IterableSplitter
import scala.collection.Iterator
@@ -41,13 +42,10 @@ self =>
@inline final def apply(idx: Int) = range.apply(idx);
- def splitter = new ParRangeIterator with SCPI
-
- type SCPI = SignalContextPassingIterator[ParRangeIterator]
+ def splitter = new ParRangeIterator
class ParRangeIterator(range: Range = self.range)
- extends ParIterator {
- me: SignalContextPassingIterator[ParRangeIterator] =>
+ extends SeqSplitter[Int] {
override def toString = "ParRangeIterator(over: " + range + ")"
private var ind = 0
private val len = range.length
@@ -64,15 +62,15 @@ self =>
private def rangeleft = range.drop(ind)
- def dup = new ParRangeIterator(rangeleft) with SCPI
+ def dup = new ParRangeIterator(rangeleft)
def split = {
val rleft = rangeleft
val elemleft = rleft.length
- if (elemleft < 2) Seq(new ParRangeIterator(rleft) with SCPI)
+ if (elemleft < 2) Seq(new ParRangeIterator(rleft))
else Seq(
- new ParRangeIterator(rleft.take(elemleft / 2)) with SCPI,
- new ParRangeIterator(rleft.drop(elemleft / 2)) with SCPI
+ new ParRangeIterator(rleft.take(elemleft / 2)),
+ new ParRangeIterator(rleft.drop(elemleft / 2))
)
}
@@ -81,7 +79,7 @@ self =>
for (sz <- sizes) yield {
val fronttaken = rleft.take(sz)
rleft = rleft.drop(sz)
- new ParRangeIterator(fronttaken) with SCPI
+ new ParRangeIterator(fronttaken)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index fdeaefc3ff..5d9c431bc1 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -48,22 +48,19 @@ extends ParSeq[T]
def this() = this(Vector())
- type SCPI = SignalContextPassingIterator[ParVectorIterator]
-
def apply(idx: Int) = vector.apply(idx)
def length = vector.length
def splitter: SeqSplitter[T] = {
- val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) with SCPI
+ val pit = new ParVectorIterator(vector.startIndex, vector.endIndex)
vector.initIterator(pit)
pit
}
override def seq: Vector[T] = vector
- class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with ParIterator {
- self: SCPI =>
+ class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] {
def remaining: Int = remainingElementCount
def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
def split: Seq[ParVectorIterator] = {
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 7b1e39d092..63635537d7 100644
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
@@ -22,23 +22,19 @@ package immutable {
override def seq = throw new UnsupportedOperationException
def update(idx: Int, elem: T) = throw new UnsupportedOperationException
- type SCPI = SignalContextPassingIterator[ParIterator]
-
- class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends super.ParIterator {
- me: SignalContextPassingIterator[ParIterator] =>
-
+ class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] {
def remaining = until - i
def hasNext = i < until
def next = { i += 1; elem }
- def dup = new ParIterator(i, until, elem) with SCPI
+ def dup = new ParIterator(i, until, elem)
def psplit(sizes: Int*) = {
val incr = sizes.scanLeft(0)(_ + _)
- for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) with SCPI
+ for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem)
}
def split = psplit(remaining / 2, remaining - remaining / 2)
}
- def splitter = new ParIterator with SCPI
+ def splitter = new ParIterator
}
}
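Across these files the `SignalContextPassingIterator` mixin and the `SCPI` self-types disappear: signal delegation now lives in `IterableSplitter` itself, so a custom splitter extends `SeqSplitter` directly. A self-contained sketch mirroring the `ParIterator` above (the class name is illustrative):

  import scala.collection.parallel.SeqSplitter

  // a splitter over `until - i` repetitions of a single element
  class RepeatedSplitter[T](elem: T, var i: Int, until: Int) extends SeqSplitter[T] {
    def remaining = until - i
    def hasNext = i < until
    def next() = { i += 1; elem }
    def dup = new RepeatedSplitter(elem, i, until)
    def split = psplit(remaining / 2, remaining - remaining / 2)
    def psplit(sizes: Int*) = {
      val incr = sizes.scanLeft(0)(_ + _)
      for ((start, end) <- incr.init zip incr.tail)
        yield new RepeatedSplitter(elem, i + start, (i + end) min until)
    }
  }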
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index a1eb3beb0c..5c3da66be0 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -19,7 +19,9 @@ import scala.collection.generic.CanBuildFrom
import scala.collection.generic.ParFactory
import scala.collection.generic.Sizing
import scala.collection.parallel.Combiner
+import scala.collection.parallel.SeqSplitter
import scala.collection.parallel.ParSeqLike
+import scala.collection.parallel.Task
import scala.collection.parallel.CHECK_RATE
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.Builder
@@ -55,7 +57,6 @@ extends ParSeq[T]
with Serializable
{
self =>
- import collection.parallel.tasksupport._
@transient private var array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]]
@@ -74,17 +75,13 @@ self =>
override def seq = arrayseq
- type SCPI = SignalContextPassingIterator[ParArrayIterator]
-
protected[parallel] def splitter: ParArrayIterator = {
- val pit = new ParArrayIterator with SCPI
+ val pit = new ParArrayIterator
pit
}
class ParArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array)
- extends super.ParIterator {
- me: SignalContextPassingIterator[ParArrayIterator] =>
-
+ extends SeqSplitter[T] {
def hasNext = i < until
def next = {
@@ -95,9 +92,9 @@ self =>
def remaining = until - i
- def dup = new ParArrayIterator(i, until, arr) with SCPI
+ def dup = new ParArrayIterator(i, until, arr)
- def psplit(sizesIncomplete: Int*): Seq[ParIterator] = {
+ def psplit(sizesIncomplete: Int*): Seq[ParArrayIterator] = {
var traversed = i
val total = sizesIncomplete.reduceLeft(_ + _)
val left = remaining
@@ -106,19 +103,19 @@ self =>
val start = traversed
val end = (traversed + sz) min until
traversed = end
- new ParArrayIterator(start, end, arr) with SCPI
+ new ParArrayIterator(start, end, arr)
} else {
- new ParArrayIterator(traversed, traversed, arr) with SCPI
+ new ParArrayIterator(traversed, traversed, arr)
}
}
- override def split: Seq[ParIterator] = {
+ override def split: Seq[ParArrayIterator] = {
val left = remaining
if (left >= 2) {
val splitpoint = left / 2
val sq = Seq(
- new ParArrayIterator(i, i + splitpoint, arr) with SCPI,
- new ParArrayIterator(i + splitpoint, until, arr) with SCPI)
+ new ParArrayIterator(i, i + splitpoint, arr),
+ new ParArrayIterator(i + splitpoint, until, arr))
i = until
sq
} else {
@@ -587,22 +584,22 @@ self =>
val targetarr = targarrseq.array.asInstanceOf[Array[Any]]
// fill it in parallel
- executeAndWaitResult(new Map[S](f, targetarr, 0, length))
+ tasksupport.executeAndWaitResult(new Map[S](f, targetarr, 0, length))
// wrap it into a parallel array
(new ParArray[S](targarrseq)).asInstanceOf[That]
} else super.map(f)(bf)
override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanBuildFrom[ParArray[T], U, That]): That =
- if (parallelismLevel > 1 && buildsArray(cbf(repr))) {
+ if (tasksupport.parallelismLevel > 1 && buildsArray(cbf(repr))) {
// reserve an array
val targarrseq = new ArraySeq[U](length + 1)
val targetarr = targarrseq.array.asInstanceOf[Array[Any]]
targetarr(0) = z
// do a parallel prefix scan
- if (length > 0) executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult {
- tree => executeAndWaitResult(new ScanToArray(tree, z, op, targetarr))
+ if (length > 0) tasksupport.executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult {
+ tree => tasksupport.executeAndWaitResult(new ScanToArray(tree, z, op, targetarr))
})
// wrap the array into a parallel array
@@ -664,7 +661,7 @@ self =>
val fp = howmany / 2
List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, parallelismLevel)
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
}
/* serialization */
diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
new file mode 100644
index 0000000000..470972adad
--- /dev/null
+++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
@@ -0,0 +1,193 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.parallel.mutable
+
+
+
+import scala.collection.generic._
+import scala.collection.parallel.Combiner
+import scala.collection.parallel.IterableSplitter
+import scala.collection.parallel.Task
+import scala.collection.mutable.BasicNode
+import scala.collection.mutable.TNode
+import scala.collection.mutable.LNode
+import scala.collection.mutable.CNode
+import scala.collection.mutable.SNode
+import scala.collection.mutable.INode
+import scala.collection.mutable.Ctrie
+import scala.collection.mutable.CtrieIterator
+
+
+
+/** Parallel Ctrie collection.
+ *
+ * Its bulk operations are parallelized, and it uses the snapshot operation
+ * to create the splitter. This means that parallel bulk operations can be
+ * called concurrently with modifications to the map.
+ *
+ * @author Aleksandar Prokopec
+ * @since 2.10
+ */
+final class ParCtrie[K, V] private[collection] (private val ctrie: Ctrie[K, V])
+extends ParMap[K, V]
+ with GenericParMapTemplate[K, V, ParCtrie]
+ with ParMapLike[K, V, ParCtrie[K, V], Ctrie[K, V]]
+ with ParCtrieCombiner[K, V]
+ with Serializable
+{
+ def this() = this(new Ctrie)
+
+ override def mapCompanion: GenericParMapCompanion[ParCtrie] = ParCtrie
+
+ override def empty: ParCtrie[K, V] = ParCtrie.empty
+
+ protected[this] override def newCombiner = ParCtrie.newCombiner
+
+ override def seq = ctrie
+
+ def splitter = new ParCtrieSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true)
+
+ override def clear() = ctrie.clear()
+
+ def result = this
+
+ def get(key: K): Option[V] = ctrie.get(key)
+
+ def put(key: K, value: V): Option[V] = ctrie.put(key, value)
+
+ def update(key: K, value: V): Unit = ctrie.update(key, value)
+
+ def remove(key: K): Option[V] = ctrie.remove(key)
+
+ def +=(kv: (K, V)): this.type = {
+ ctrie.+=(kv)
+ this
+ }
+
+ def -=(key: K): this.type = {
+ ctrie.-=(key)
+ this
+ }
+
+ override def size = {
+ val in = ctrie.readRoot()
+ val r = in.gcasRead(ctrie)
+ r match {
+ case tn: TNode[_, _] => tn.cachedSize(ctrie)
+ case ln: LNode[_, _] => ln.cachedSize(ctrie)
+ case cn: CNode[_, _] =>
+ tasksupport.executeAndWaitResult(new Size(0, cn.array.length, cn.array))
+ cn.cachedSize(ctrie)
+ }
+ }
+
+ override def stringPrefix = "ParCtrie"
+
+ /* tasks */
+
+ /** Computes Ctrie size in parallel. */
+ class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] {
+ var result = -1
+ def leaf(prev: Option[Int]) = {
+ var sz = 0
+ var i = offset
+ val until = offset + howmany
+ while (i < until) {
+ array(i) match {
+ case sn: SNode[_, _] => sz += 1
+ case in: INode[K, V] => sz += in.cachedSize(ctrie)
+ }
+ i += 1
+ }
+ result = sz
+ }
+ def split = {
+ val fp = howmany / 2
+ Seq(new Size(offset, fp, array), new Size(offset + fp, howmany - fp, array))
+ }
+ def shouldSplitFurther = howmany > 1
+ override def merge(that: Size) = result = result + that.result
+ }
+
+}
+
+
+private[collection] class ParCtrieSplitter[K, V](lev: Int, ct: Ctrie[K, V], mustInit: Boolean)
+extends CtrieIterator[K, V](lev, ct, mustInit)
+ with IterableSplitter[(K, V)]
+{
+ // only evaluated if `remaining` is invoked (which is not used by most tasks)
+ lazy val totalsize = ct.par.size
+ var iterated = 0
+
+ protected override def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_lev, _ct, _mustInit)
+
+ override def shouldSplitFurther[S](coll: collection.parallel.ParIterable[S], parallelismLevel: Int) = {
+ val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)
+ level < maxsplits
+ }
+
+ def dup = {
+ val it = newIterator(0, ct, false)
+ dupTo(it)
+ it.iterated = this.iterated
+ it
+ }
+
+ override def next() = {
+ iterated += 1
+ super.next()
+ }
+
+ def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]]
+
+ override def isRemainingCheap = false
+
+ def remaining: Int = totalsize - iterated
+}
+
+
+/** Only used within the `ParCtrie`. */
+private[mutable] trait ParCtrieCombiner[K, V] extends Combiner[(K, V), ParCtrie[K, V]] {
+
+ def combine[N <: (K, V), NewTo >: ParCtrie[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else {
+ throw new UnsupportedOperationException("This shouldn't have been called in the first place.")
+
+ val thiz = this.asInstanceOf[ParCtrie[K, V]]
+ val that = other.asInstanceOf[ParCtrie[K, V]]
+ val result = new ParCtrie[K, V]
+
+ result ++= thiz.iterator
+ result ++= that.iterator
+
+ result
+ }
+
+ override def canBeShared = true
+
+}
+
+
+object ParCtrie extends ParMapFactory[ParCtrie] {
+
+ def empty[K, V]: ParCtrie[K, V] = new ParCtrie[K, V]
+
+ def newCombiner[K, V]: Combiner[(K, V), ParCtrie[K, V]] = new ParCtrie[K, V]
+
+ implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParCtrie[K, V]] = new CanCombineFromMap[K, V]
+
+}
+
+
+
+
+
+
+
+
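Because the splitter is built from a read-only snapshot, bulk operations observe a fixed view of the map even while writers proceed. A usage sketch under this commit's API:

  import scala.collection.parallel.mutable.ParCtrie

  val pct = new ParCtrie[Int, Int]
  for (i <- 0 until 10000) pct.put(i, i * i)
  // the filter traverses a snapshot; the concurrent update below cannot
  // disturb it (and may or may not be visible to it)
  val squaresOfEvens = pct.filter(_._1 % 2 == 0)
  pct.put(10000, 100000000)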
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 31750b0b0d..6ce6c45460 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -12,12 +12,12 @@ package mutable
-
import collection.generic._
import collection.mutable.DefaultEntry
import collection.mutable.HashEntry
import collection.mutable.HashTable
import collection.mutable.UnrolledBuffer
+import collection.parallel.Task
@@ -56,7 +56,7 @@ self =>
override def seq = new collection.mutable.HashMap[K, V](hashTableContents)
- def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) with SCPI
+ def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]])
override def size = tableSize
@@ -93,14 +93,11 @@ self =>
override def stringPrefix = "ParHashMap"
- type SCPI = SignalContextPassingIterator[ParHashMapIterator]
-
class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V])
- extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) with ParIterator {
- me: SCPI =>
+ extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) {
def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value);
def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) =
- new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) with SCPI
+ new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
private def writeObject(out: java.io.ObjectOutputStream) {
@@ -160,14 +157,13 @@ private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFa
extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
with collection.mutable.HashTable.HashUtils[K]
{
-//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
- import collection.parallel.tasksupport._
private var mask = ParHashMapCombiner.discriminantmask
private var nonmasklen = ParHashMapCombiner.nonmasklength
+ private var seedvalue = 27
def +=(elem: (K, V)) = {
sz += 1
- val hc = improve(elemHashCode(elem._1))
+ val hc = improve(elemHashCode(elem._1), seedvalue)
val pos = (hc >>> nonmasklen)
if (buckets(pos) eq null) {
// initialize bucket
@@ -180,9 +176,9 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
def result: ParHashMap[K, V] = if (size >= (ParHashMapCombiner.numblocks * sizeMapBucketSize)) { // 1024
// construct table
- val table = new AddingHashTable(size, tableLoadFactor)
+ val table = new AddingHashTable(size, tableLoadFactor, seedvalue)
val bucks = buckets.map(b => if (b ne null) b.headPtr else null)
- val insertcount = executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length))
+ val insertcount = combinerTaskSupport.executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length))
table.setSize(insertcount)
// TODO compare insertcount and size to see if compression is needed
val c = table.hashTableContents
@@ -214,11 +210,12 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
* and true if the key was successfully inserted. It does not update the number of elements
* in the table.
*/
- private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int) extends HashTable[K, DefaultEntry[K, V]] {
+ private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, DefaultEntry[K, V]] {
import HashTable._
_loadFactor = lf
table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems)))
tableSize = 0
+ seedvalue = _seedvalue
threshold = newThreshold(_loadFactor, table.length)
sizeMapInit(table.length)
def setSize(sz: Int) = tableSize = sz
@@ -289,7 +286,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
insertcount
}
private def assertCorrectBlock(block: Int, k: K) {
- val hc = improve(elemHashCode(k))
+ val hc = improve(elemHashCode(k), seedvalue)
if ((hc >>> nonmasklen) != block) {
println(hc + " goes to " + (hc >>> nonmasklen) + ", while expected block is " + block)
assert((hc >>> nonmasklen) == block)
@@ -302,7 +299,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
override def merge(that: FillBlocks) {
this.result += that.result
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, parallelismLevel)
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
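The new `seedvalue` threaded through the combiner and `AddingHashTable` keys the hash-improvement function, so two tables built from the same keys need not agree on bucket placement. A conceptual illustration only, not the library's actual mixing function:

  // mix a per-table seed into an already-improved hash code
  def seededImprove(improve: Int => Int, seed: Int)(hcode: Int): Int = {
    val h = improve(hcode)
    val rot = seed & 31
    (h >>> rot) | (h << (32 - rot))  // seed-dependent rotation
  }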
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 7763cdf318..e0a2ab03df 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -8,10 +8,15 @@
package scala.collection.parallel.mutable
+
+
import collection.generic._
import collection.mutable.FlatHashTable
import collection.parallel.Combiner
import collection.mutable.UnrolledBuffer
+import collection.parallel.Task
+
+
/** A parallel hash set.
*
@@ -66,14 +71,11 @@ extends ParSet[T]
def contains(elem: T) = containsEntry(elem)
- def splitter = new ParHashSetIterator(0, table.length, size) with SCPI
-
- type SCPI = SignalContextPassingIterator[ParHashSetIterator]
+ def splitter = new ParHashSetIterator(0, table.length, size)
class ParHashSetIterator(start: Int, iteratesUntil: Int, totalElements: Int)
- extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) with ParIterator {
- me: SCPI =>
- def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) with SCPI
+ extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) {
+ def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total)
}
private def writeObject(s: java.io.ObjectOutputStream) {
@@ -116,11 +118,10 @@ private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFacto
extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
with collection.mutable.FlatHashTable.HashUtils[T] {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
- import collection.parallel.tasksupport._
private var mask = ParHashSetCombiner.discriminantmask
private var nonmasklen = ParHashSetCombiner.nonmasklength
private var seedvalue = 27
-
+
def +=(elem: T) = {
sz += 1
val hc = improve(elemHashCode(elem), seedvalue)
@@ -142,7 +143,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
private def parPopulate: FlatHashTable.Contents[T] = {
// construct it in parallel
val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue)
- val (inserted, leftovers) = executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length))
+ val (inserted, leftovers) = combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length))
var leftinserts = 0
for (elem <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, elem.asInstanceOf[T])
table.setSize(leftinserts + inserted)
@@ -307,7 +308,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
// the total number of successfully inserted elements is adjusted accordingly
result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2)
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, parallelismLevel)
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 9b8e233b95..8c93732427 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -29,7 +29,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
/** A parallel iterator returning all the entries.
*/
abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]]
- (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
+ (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
extends IterableSplitter[T] with SizeMapUtils {
private val itertable = table
private var traversed = 0
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index eadc93d422..01eb17024e 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -8,18 +8,20 @@
package scala.collection.parallel.mutable
+
+
import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
import scala.collection.parallel.TaskSupport
-//import scala.collection.parallel.EnvironmentPassingCombiner
import scala.collection.parallel.unsupportedop
import scala.collection.parallel.Combiner
+import scala.collection.parallel.Task
+
+
/** An array combiner that uses a chain of arraybuffers to store elements. */
trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] {
-//self: EnvironmentPassingCombiner[T, ParArray[T]] =>
- import collection.parallel.tasksupport._
override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
@@ -30,7 +32,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
val arrayseq = new ArraySeq[T](size)
val array = arrayseq.array.asInstanceOf[Array[Any]]
- executeAndWaitResult(new CopyChainToArray(array, 0, size))
+ combinerTaskSupport.executeAndWaitResult(new CopyChainToArray(array, 0, size))
new ParArray(arrayseq)
} else { // optimisation if there is only 1 array
@@ -79,7 +81,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
val fp = howmany / 2
List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, parallelismLevel)
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index dc583fb4e7..410b542a68 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -18,9 +18,9 @@ import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.UnrolledBuffer
import scala.collection.mutable.UnrolledBuffer.Unrolled
import scala.collection.parallel.TaskSupport
-//import scala.collection.parallel.EnvironmentPassingCombiner
import scala.collection.parallel.unsupportedop
import scala.collection.parallel.Combiner
+import scala.collection.parallel.Task
@@ -40,8 +40,6 @@ extends Combiner[T, ParArray[T]] {
// because size is doubling, random access is O(logn)!
val buff = new DoublingUnrolledBuffer[Any]
- import collection.parallel.tasksupport._
-
def +=(elem: T) = {
buff += elem
this
@@ -51,7 +49,7 @@ extends Combiner[T, ParArray[T]] {
val arrayseq = new ArraySeq[T](size)
val array = arrayseq.array.asInstanceOf[Array[Any]]
- executeAndWaitResult(new CopyUnrolledToArray(array, 0, size))
+ combinerTaskSupport.executeAndWaitResult(new CopyUnrolledToArray(array, 0, size))
new ParArray(arrayseq)
}
@@ -109,7 +107,7 @@ extends Combiner[T, ParArray[T]] {
val fp = howmany / 2
List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, parallelismLevel)
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")"
}
}
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index f152629c50..943e0208c7 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -46,8 +46,16 @@ package object parallel {
else new ThreadPoolTaskSupport
} else new ThreadPoolTaskSupport
- val tasksupport = getTaskSupport
-
+ val defaultTaskSupport: TaskSupport = getTaskSupport
+
+ def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = {
+ c match {
+ case pc: ParIterableLike[_, _, _] => pc.tasksupport = t
+ case _ => // do nothing
+ }
+ c
+ }
+
/* implicit conversions */
implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] {
@@ -83,6 +91,7 @@ package object parallel {
}
}
+
package parallel {
trait FactoryOps[From, Elem, To] {
trait Otherwise[R] {
@@ -114,6 +123,18 @@ package parallel {
/* classes */
+ trait CombinerFactory[U, Repr] {
+ /** Provides a combiner used to construct a collection. */
+ def apply(): Combiner[U, Repr]
+ /** Indicates whether every call to the `apply` method returns the same combiner.
+ * If each call to `apply` creates a new combiner, this method
+ * returns `false`. If the same combiner is returned by every call
+ * (typically the case for concurrent collections, which are
+ * thread-safe), this method returns `true`.
+ */
+ def doesShareCombiners: Boolean
+ }
+
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
final case class CompositeThrowable(
val throwables: Set[Throwable]
@@ -127,8 +148,9 @@ package parallel {
* Automatically forwards the signal delegate when splitting.
*/
private[parallel] class BufferSplitter[T]
- (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, var signalDelegate: collection.generic.Signalling)
+ (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: collection.generic.Signalling)
extends IterableSplitter[T] {
+ signalDelegate = _sigdel
def hasNext = index < until
def next = {
val r = buffer(index)
@@ -182,7 +204,7 @@ package parallel {
* the receiver (which will be the return value).
*/
private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]]
- (private val bucketnumber: Int)
+ (private val bucketnumber: Int)
extends Combiner[Elem, To] {
//self: EnvironmentPassingCombiner[Elem, To] =>
protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber)
@@ -196,6 +218,7 @@ package parallel {
}
def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
+
def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = {
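A minimal usage sketch of the `setTaskSupport` helper added above, using only names introduced in this patch (`defaultTaskSupport`, `ParIterableLike`); the collection value is illustrative.

{{{
import scala.collection.parallel._

val pc = (1 to 10000).par                      // any parallel collection
// setTaskSupport assigns the given TaskSupport when the argument is a
// ParIterableLike, and returns the collection unchanged either way.
val configured = setTaskSupport(pc, defaultTaskSupport)
val sum = configured.reduce(_ + _)
}}}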
diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala
new file mode 100644
index 0000000000..c38e668f30
--- /dev/null
+++ b/src/library/scala/concurrent/Awaitable.scala
@@ -0,0 +1,24 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+
+
+import scala.annotation.implicitNotFound
+import scala.util.Duration
+
+
+
+trait Awaitable[+T] {
+ @implicitNotFound(msg = "Waiting must be done by calling `blocking(timeout)(b)`, where `b` is the `Awaitable` object or a potentially blocking piece of code.")
+ def await(atMost: Duration)(implicit canawait: CanAwait): T
+}
+
+
+
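To make the `Awaitable` contract concrete, a trivial sketch of an implementation that is always ready; `AlreadyDone` is a hypothetical name, not part of the patch.

{{{
import scala.concurrent.{ Awaitable, CanAwait }
import scala.util.Duration

// Always ready: await ignores the timeout and returns immediately.
class AlreadyDone[T](value: T) extends Awaitable[T] {
  def await(atMost: Duration)(implicit canawait: CanAwait): T = value
}
}}}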
diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala
index 43d684641e..f6d6341151 100644
--- a/src/library/scala/concurrent/Channel.scala
+++ b/src/library/scala/concurrent/Channel.scala
@@ -46,4 +46,5 @@ class Channel[A] {
written = written.next
x
}
+
}
diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala
new file mode 100644
index 0000000000..7d005838d3
--- /dev/null
+++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala
@@ -0,0 +1,111 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+
+
+import java.util.concurrent.{ Executors, ExecutorService }
+import scala.concurrent.forkjoin.ForkJoinPool
+import scala.util.{ Duration, Try, Success, Failure }
+import ConcurrentPackageObject._
+
+
+
+/** This package object contains primitives for concurrent and parallel programming.
+ */
+abstract class ConcurrentPackageObject {
+ /** A global execution environment for executing lightweight tasks.
+ */
+ lazy val executionContext =
+ new impl.ExecutionContextImpl(getExecutorService)
+
+ private[concurrent] def getExecutorService: AnyRef =
+ if (util.Properties.isJavaAtLeast("1.6")) {
+ val vendor = util.Properties.javaVmVendor
+ if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinPool
+ else Executors.newCachedThreadPool()
+ } else Executors.newCachedThreadPool()
+
+ val handledFutureException: PartialFunction[Throwable, Throwable] = {
+ case t: Throwable if isFutureThrowable(t) => t
+ }
+
+ // TODO rename appropriately and make public
+ private[concurrent] def isFutureThrowable(t: Throwable) = t match {
+ case e: Error => false
+ case t: scala.util.control.ControlThrowable => false
+ case i: InterruptedException => false
+ case _ => true
+ }
+
+ private[concurrent] def resolve[T](source: Try[T]): Try[T] = source match {
+ case Failure(t: scala.runtime.NonLocalReturnControl[_]) => Success(t.value.asInstanceOf[T])
+ case Failure(t: scala.util.control.ControlThrowable) => Failure(new ExecutionException("Boxed ControlThrowable", t))
+ case Failure(t: InterruptedException) => Failure(new ExecutionException("Boxed InterruptedException", t))
+ case Failure(e: Error) => Failure(new ExecutionException("Boxed Error", e))
+ case _ => source
+ }
+
+ private[concurrent] def resolver[T] =
+ resolverFunction.asInstanceOf[PartialFunction[Throwable, Try[T]]]
+
+ /* concurrency constructs */
+
+ def future[T](body: =>T)(implicit execCtx: ExecutionContext = executionContext): Future[T] =
+ execCtx future body
+
+ def promise[T]()(implicit execCtx: ExecutionContext = executionContext): Promise[T] =
+ execCtx promise
+
+ /** Wraps a block of code into an awaitable object. */
+ def body2awaitable[T](body: =>T) = new Awaitable[T] {
+ def await(atMost: Duration)(implicit cb: CanAwait) = body
+ }
+
+ /** Used to block on a piece of code which potentially blocks.
+ *
+ * @param body A piece of code which contains potentially blocking or long running calls.
+ *
+ * Calling this method may throw the following exceptions:
+ * - CancellationException - if the computation was cancelled
+ * - InterruptedException - in the case that a wait within the blockable object was interrupted
+ * - TimeoutException - in the case that the blockable object timed out
+ */
+ def blocking[T](atMost: Duration)(body: =>T)(implicit execCtx: ExecutionContext): T =
+ executionContext.blocking(atMost)(body)
+
+ /** Blocks on an awaitable object.
+ *
+ * @param awaitable An object with a `block` method which runs potentially blocking or long running calls.
+ *
+ * Calling this method may throw the following exceptions:
+ * - CancellationException - if the computation was cancelled
+ * - InterruptedException - in the case that a wait within the blockable object was interrupted
+ * - TimeoutException - in the case that the blockable object timed out
+ */
+ def blocking[T](awaitable: Awaitable[T], atMost: Duration)(implicit execCtx: ExecutionContext = executionContext): T =
+ executionContext.blocking(awaitable, atMost)
+
+ @inline implicit final def int2durationops(x: Int): DurationOps = new DurationOps(x)
+}
+
+private[concurrent] object ConcurrentPackageObject {
+ // TODO, docs, return type
+ // Note that having this in the package object led to failures when
+ // compiling a subset of sources; it seems that the wildcard is not
+ // properly handled, and you get messages like "type _$1 defined twice".
+ // This is consistent with other package object breakdowns.
+ private val resolverFunction: PartialFunction[Throwable, Try[_]] = {
+ case t: scala.runtime.NonLocalReturnControl[_] => Success(t.value)
+ case t: scala.util.control.ControlThrowable => Failure(new ExecutionException("Boxed ControlThrowable", t))
+ case t: InterruptedException => Failure(new ExecutionException("Boxed InterruptedException", t))
+ case e: Error => Failure(new ExecutionException("Boxed Error", e))
+ case t => Failure(t)
+ }
+}
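A short sketch of the primitives defined above, assuming the default `executionContext` is supplied through the default implicit arguments.

{{{
import scala.concurrent._
import scala.util.Duration

val f: Future[Int] = future { 21 * 2 }   // runs on executionContext

// blocking routes the wait through the execution context so that it
// can compensate for the blocked thread.
val r: Int = blocking(f, Duration.fromNanos(1000000000L))   // ~1 s
}}}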
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index e308c3b5a6..a17153bad5 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -8,7 +8,6 @@
package scala.concurrent
-import ops.future
/** A `DelayedLazyVal` is a wrapper for lengthy computations which have a
* valid partially computed result.
@@ -40,8 +39,10 @@ class DelayedLazyVal[T](f: () => T, body: => Unit) {
*/
def apply(): T = if (isDone) complete else f()
- future {
+ // TODO replace with scala.concurrent.future { ... }
+ ops.future {
body
_isDone = true
}
+
}
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
new file mode 100644
index 0000000000..eb1b3355c0
--- /dev/null
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -0,0 +1,132 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+
+
+import java.util.concurrent.atomic.{ AtomicInteger }
+import java.util.concurrent.{ Executors, Future => JFuture, Callable }
+import scala.util.Duration
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.forkjoin.{ ForkJoinPool, RecursiveTask => FJTask, RecursiveAction, ForkJoinWorkerThread }
+import scala.collection.generic.CanBuildFrom
+import collection._
+
+
+
+trait ExecutionContext {
+
+ protected implicit object CanAwaitEvidence extends CanAwait
+
+ def execute(runnable: Runnable): Unit
+
+ def execute[U](body: () => U): Unit
+
+ def promise[T]: Promise[T]
+
+ def future[T](body: Callable[T]): Future[T] = future(body.call())
+
+ def future[T](body: => T): Future[T]
+
+ def blocking[T](atMost: Duration)(body: =>T): T
+
+ def blocking[T](awaitable: Awaitable[T], atMost: Duration): T
+
+ def reportFailure(t: Throwable): Unit
+
+ /* implementations follow */
+
+ private implicit val executionContext = this
+
+ def keptPromise[T](result: T): Promise[T] = {
+ val p = promise[T]
+ p success result
+ }
+
+ def brokenPromise[T](t: Throwable): Promise[T] = {
+ val p = promise[T]
+ p failure t
+ }
+
+ /** TODO some docs
+ *
+ */
+ def all[T, Coll[X] <: Traversable[X]](futures: Coll[Future[T]])(implicit cbf: CanBuildFrom[Coll[_], T, Coll[T]]): Future[Coll[T]] = {
+ import nondeterministic._
+ val buffer = new mutable.ArrayBuffer[T]
+ val counter = new AtomicInteger(1) // how else could we do this?
+ val p: Promise[Coll[T]] = promise[Coll[T]] // we need an implicit execctx in the signature
+ var idx = 0
+
+ def tryFinish() = if (counter.decrementAndGet() == 0) {
+ val builder = cbf(futures)
+ builder ++= buffer
+ p success builder.result
+ }
+
+ for (f <- futures) {
+ val currentIndex = idx
+ buffer += null.asInstanceOf[T]
+ counter.incrementAndGet()
+ f onComplete {
+ case Failure(t) =>
+ p tryFailure t
+ case Success(v) =>
+ buffer(currentIndex) = v
+ tryFinish()
+ }
+ idx += 1
+ }
+
+ tryFinish()
+
+ p.future
+ }
+
+ /** TODO some docs
+ *
+ */
+ def any[T](futures: Traversable[Future[T]]): Future[T] = {
+ val p = promise[T]
+ val completeFirst: Try[T] => Unit = elem => p tryComplete elem
+
+ futures foreach (_ onComplete completeFirst)
+
+ p.future
+ }
+
+ /** TODO some docs
+ *
+ */
+ def find[T](futures: Traversable[Future[T]])(predicate: T => Boolean): Future[Option[T]] = {
+ if (futures.isEmpty) Promise.kept[Option[T]](None).future
+ else {
+ val result = promise[Option[T]]
+ val count = new AtomicInteger(futures.size)
+ val search: Try[T] => Unit = {
+ v => v match {
+ case Success(r) => if (predicate(r)) result trySuccess Some(r)
+ case _ =>
+ }
+ if (count.decrementAndGet() == 0) result trySuccess None
+ }
+
+ futures.foreach(_ onComplete search)
+
+ result.future
+ }
+ }
+
+}
+
+
+sealed trait CanAwait
+
+
+
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
new file mode 100644
index 0000000000..eb54b61db0
--- /dev/null
+++ b/src/library/scala/concurrent/Future.scala
@@ -0,0 +1,492 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+
+
+import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
+import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS => MILLIS }
+import java.lang.{ Iterable => JIterable }
+import java.util.{ LinkedList => JLinkedList }
+import java.{ lang => jl }
+import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
+
+import scala.util.{ Timeout, Duration, Try, Success, Failure }
+import scala.Option
+
+import scala.annotation.tailrec
+import scala.collection.mutable.Stack
+import scala.collection.mutable.Builder
+import scala.collection.generic.CanBuildFrom
+
+
+
+/** The trait that represents futures.
+ *
+ * Asynchronous computations that yield futures are created with the `future` call:
+ *
+ * {{{
+ * val s = "Hello"
+ * val f: Future[String] = future {
+ * s + " future!"
+ * }
+ * f onSuccess {
+ * case msg => println(msg)
+ * }
+ * }}}
+ *
+ * @author Philipp Haller, Heather Miller, Aleksandar Prokopec, Viktor Klang
+ *
+ * @define multipleCallbacks
+ * Multiple callbacks may be registered; there is no guarantee that they will be
+ * executed in a particular order.
+ *
+ * @define caughtThrowables
+ * A future may contain a throwable object, which means that the future failed.
+ * Futures obtained through combinators have the same exception as the future they were obtained from.
+ * The following throwable objects are not contained in the future:
+ * - `Error` - errors are not contained within futures
+ * - `InterruptedException` - not contained within futures
+ * - all `scala.util.control.ControlThrowable` except `NonLocalReturnControl` - not contained within futures
+ *
+ * Instead, the future is completed with an `ExecutionException` that has one of the exceptions above
+ * as its cause.
+ * If a future is failed with a `scala.runtime.NonLocalReturnControl`,
+ * it is completed with the value carried by that throwable instead.
+ *
+ * @define nonDeterministic
+ * Note: using this method yields nondeterministic dataflow programs.
+ *
+ * @define forComprehensionExamples
+ * Example:
+ *
+ * {{{
+ * val f = future { 5 }
+ * val g = future { 3 }
+ * val h = for {
+ * x: Int <- f // returns Future(5)
+ * y: Int <- g // returns Future(3)
+ * } yield x + y
+ * }}}
+ *
+ * is translated to:
+ *
+ * {{{
+ * f flatMap { (x: Int) => g map { (y: Int) => x + y } }
+ * }}}
+ */
+trait Future[+T] extends Awaitable[T] {
+self =>
+
+ /* Callbacks */
+
+ /** When this future is completed successfully (i.e. with a value),
+ * apply the provided partial function to the value if the partial function
+ * is defined at that value.
+ *
+ * If the future has already been completed with a value,
+ * this will either be applied immediately or be scheduled asynchronously.
+ *
+ * $multipleCallbacks
+ */
+ def onSuccess[U](pf: PartialFunction[T, U]): this.type = onComplete {
+ case Failure(t) => // do nothing
+ case Success(v) => if (pf isDefinedAt v) pf(v) else { /*do nothing*/ }
+ }
+
+ /** When this future is completed with a failure (i.e. with a throwable),
+ * apply the provided callback to the throwable.
+ *
+ * $caughtThrowables
+ *
+ * If the future has already been completed with a failure,
+ * this will either be applied immediately or be scheduled asynchronously.
+ *
+ * Will not be called in case that the future is completed with a value.
+ *
+ * $multipleCallbacks
+ */
+ def onFailure[U](callback: PartialFunction[Throwable, U]): this.type = onComplete {
+ case Failure(t) => if (isFutureThrowable(t) && callback.isDefinedAt(t)) callback(t) else { /*do nothing*/ }
+ case Success(v) => // do nothing
+ }
+
+ /** When this future is completed, either through an exception, a timeout, or a value,
+ * apply the provided function.
+ *
+ * If the future has already been completed,
+ * this will either be applied immediately or be scheduled asynchronously.
+ *
+ * $multipleCallbacks
+ */
+ def onComplete[U](func: Try[T] => U): this.type
+
+
+ /* Miscellaneous */
+
+ /** Creates a new promise.
+ */
+ def newPromise[S]: Promise[S]
+
+
+ /* Projections */
+
+ /** Returns a failed projection of this future.
+ *
+ * The failed projection is a future holding a value of type `Throwable`.
+ *
+ * It is completed with a value which is the throwable of the original future
+ * in case the original future is failed.
+ *
+ * It is failed with a `NoSuchElementException` if the original future is completed successfully.
+ *
+ * Blocking on this future returns a value if the original future is completed with an exception
+ * and throws a corresponding exception if the original future fails.
+ */
+ def failed: Future[Throwable] = {
+ def noSuchElem(v: T) =
+ new NoSuchElementException("Future.failed not completed with a throwable. Instead completed with: " + v)
+
+ val p = newPromise[Throwable]
+
+ onComplete {
+ case Failure(t) => p success t
+ case Success(v) => p failure noSuchElem(v)
+ }
+
+ p.future
+ }
+
+
+ /* Monadic operations */
+
+ /** Asynchronously processes the value in the future once the value becomes available.
+ *
+ * Will not be called if the future fails.
+ */
+ def foreach[U](f: T => U): Unit = onComplete {
+ case Success(r) => f(r)
+ case Failure(_) => // do nothing
+ }
+
+ /** Creates a new future by applying a function to the successful result of
+ * this future. If this future is completed with an exception then the new
+ * future will also contain this exception.
+ *
+ * $forComprehensionExamples
+ */
+ def map[S](f: T => S): Future[S] = {
+ val p = newPromise[S]
+
+ onComplete {
+ case Failure(t) => p failure t
+ case Success(v) =>
+ try p success f(v)
+ catch {
+ case t => p complete resolver(t)
+ }
+ }
+
+ p.future
+ }
+
+ /** Creates a new future by applying a function to the successful result of
+ * this future, and returns the result of the function as the new future.
+ * If this future is completed with an exception then the new future will
+ * also contain this exception.
+ *
+ * $forComprehensionExamples
+ */
+ def flatMap[S](f: T => Future[S]): Future[S] = {
+ val p = newPromise[S]
+
+ onComplete {
+ case Failure(t) => p failure t
+ case Success(v) =>
+ try {
+ f(v) onComplete {
+ case Failure(t) => p failure t
+ case Success(v) => p success v
+ }
+ } catch {
+ case t: Throwable => p complete resolver(t)
+ }
+ }
+
+ p.future
+ }
+
+ /** Creates a new future by filtering the value of the current future with a predicate.
+ *
+ * If the current future contains a value which satisfies the predicate, the new future will also hold that value.
+ * Otherwise, the resulting future will fail with a `NoSuchElementException`.
+ *
+ * If the current future fails or times out, the resulting future also fails or times out, respectively.
+ *
+ * Example:
+ * {{{
+ * val f = future { 5 }
+ * val g = f filter { _ % 2 == 1 }
+ * val h = f filter { _ % 2 == 0 }
+ * await(0) g // evaluates to 5
+ * await(0) h // throws a NoSuchElementException
+ * }}}
+ */
+ def filter(pred: T => Boolean): Future[T] = {
+ val p = newPromise[T]
+
+ onComplete {
+ case Failure(t) => p failure t
+ case Success(v) =>
+ try {
+ if (pred(v)) p success v
+ else p failure new NoSuchElementException("Future.filter predicate is not satisfied by: " + v)
+ } catch {
+ case t: Throwable => p complete resolver(t)
+ }
+ }
+
+ p.future
+ }
+
+ /** Creates a new future by mapping the value of the current future if the given partial function is defined at that value.
+ *
+ * If the current future contains a value for which the partial function is defined, the new future will also hold that value.
+ * Otherwise, the resulting future will fail with a `NoSuchElementException`.
+ *
+ * If the current future fails or times out, the resulting future also fails or times out, respectively.
+ *
+ * Example:
+ * {{{
+ * val f = future { -5 }
+ * val g = f collect {
+ * case x if x < 0 => -x
+ * }
+ * val h = f collect {
+ * case x if x > 0 => x * 2
+ * }
+ * await(0) g // evaluates to 5
+ * await(0) h // throws a NoSuchElementException
+ * }}}
+ */
+ def collect[S](pf: PartialFunction[T, S]): Future[S] = {
+ val p = newPromise[S]
+
+ onComplete {
+ case Failure(t) => p failure t
+ case Success(v) =>
+ try {
+ if (pf.isDefinedAt(v)) p success pf(v)
+ else p failure new NoSuchElementException("Future.collect partial function is not defined at: " + v)
+ } catch {
+ case t: Throwable => p complete resolver(t)
+ }
+ }
+
+ p.future
+ }
+
+ /** Creates a new future that will handle any matching throwable that this
+ * future might contain. If there is no match, or if this future contains
+ * a valid result then the new future will contain the same.
+ *
+ * Example:
+ *
+ * {{{
+ * future (6 / 0) recover { case e: ArithmeticException => 0 } // result: 0
+ * future (6 / 0) recover { case e: NotFoundException => 0 } // result: exception
+ * future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3
+ * }}}
+ */
+ def recover[U >: T](pf: PartialFunction[Throwable, U]): Future[U] = {
+ val p = newPromise[U]
+
+ onComplete {
+ case Failure(t) if pf isDefinedAt t =>
+ try { p success pf(t) }
+ catch { case t: Throwable => p complete resolver(t) }
+ case otherwise => p complete otherwise
+ }
+
+ p.future
+ }
+
+ /** Creates a new future that will handle any matching throwable that this
+ * future might contain by assigning it a value of another future.
+ *
+ * If there is no match, or if this future contains
+ * a valid result then the new future will contain the same result.
+ *
+ * Example:
+ *
+ * {{{
+ * val f = future { Int.MaxValue }
+ * future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
+ * }}}
+ */
+ def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]]): Future[U] = {
+ val p = newPromise[U]
+
+ onComplete {
+ case Failure(t) if pf isDefinedAt t =>
+ try {
+ p completeWith pf(t)
+ } catch {
+ case t: Throwable => p complete resolver(t)
+ }
+ case otherwise => p complete otherwise
+ }
+
+ p.future
+ }
+
+ /** Zips the values of `this` and `that` future, and creates
+ * a new future holding the tuple of their results.
+ *
+ * If `this` future fails, the resulting future is failed
+ * with the throwable stored in `this`.
+ * Otherwise, if `that` future fails, the resulting future is failed
+ * with the throwable stored in `that`.
+ */
+ def zip[U](that: Future[U]): Future[(T, U)] = {
+ val p = newPromise[(T, U)]
+
+ this onComplete {
+ case Failure(t) => p failure t
+ case Success(r) => that onSuccess {
+ case r2 => p success ((r, r2))
+ }
+ }
+
+ that onFailure {
+ case f => p failure f
+ }
+
+ p.future
+ }
+
+ /** Creates a new future which holds the result of this future if it was completed successfully, or, if not,
+ * the result of the `that` future if `that` is completed successfully.
+ * If both futures are failed, the resulting future holds the throwable object of the first future.
+ *
+ * Using this method will not cause concurrent programs to become nondeterministic.
+ *
+ * Example:
+ * {{{
+ * val f = future { sys.error("failed") }
+ * val g = future { 5 }
+ * val h = f fallbackTo g
+ * await(0) h // evaluates to 5
+ * }}}
+ */
+ def fallbackTo[U >: T](that: Future[U]): Future[U] = {
+ val p = newPromise[U]
+
+ onComplete {
+ case Failure(t) => that onComplete {
+ case Failure(_) => p failure t
+ case Success(v) => p success v
+ }
+ case Success(v) => p success v
+ }
+
+ p.future
+ }
+
+ /** Applies the side-effecting function to the result of this future, and returns
+ * a new future with the result of this future.
+ *
+ * This method allows one to enforce that the callbacks are executed in a
+ * specified order.
+ *
+ * Note that if one of the chained `andThen` callbacks throws
+ * an exception, that exception is not propagated to the subsequent `andThen`
+ * callbacks. Instead, the subsequent `andThen` callbacks are given the original
+ * value of this future.
+ *
+ * The following example prints out `5`:
+ *
+ * {{{
+ * val f = future { 5 }
+ * f andThen {
+ * case r => sys.error("runtime exception")
+ * } andThen {
+ * case Failure(t) => println(t)
+ * case Success(v) => println(v)
+ * }
+ * }}}
+ */
+ def andThen[U](pf: PartialFunction[Try[T], U]): Future[T] = {
+ val p = newPromise[T]
+
+ onComplete {
+ case r =>
+ try if (pf isDefinedAt r) pf(r)
+ finally p complete r
+ }
+
+ p.future
+ }
+
+ /** Creates a new future which holds the result of either this future or `that` future, depending on
+ * which future was completed first.
+ *
+ * $nonDeterministic
+ *
+ * Example:
+ * {{{
+ * val f = future { sys.error("failed") }
+ * val g = future { 5 }
+ * val h = f either g
+ * await(0) h // evaluates to either 5 or throws a runtime exception
+ * }}}
+ */
+ def either[U >: T](that: Future[U]): Future[U] = {
+ val p = self.newPromise[U]
+
+ val completePromise: PartialFunction[Try[U], _] = {
+ case Failure(t) => p tryFailure t
+ case Success(v) => p trySuccess v
+ }
+
+ self onComplete completePromise
+ that onComplete completePromise
+
+ p.future
+ }
+
+}
+
+
+
+/** TODO some docs
+ *
+ * @define nonDeterministic
+ * Note: using this method yields nondeterministic dataflow programs.
+ */
+object Future {
+
+ // TODO make more modular by encoding all other helper methods within the execution context
+ /** TODO some docs
+ */
+ def all[T, Coll[X] <: Traversable[X]](futures: Coll[Future[T]])(implicit cbf: CanBuildFrom[Coll[_], T, Coll[T]], ec: ExecutionContext): Future[Coll[T]] =
+ ec.all[T, Coll](futures)
+
+ // move this to future companion object
+ @inline def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = executor.future(body)
+
+ def any[T](futures: Traversable[Future[T]])(implicit ec: ExecutionContext): Future[T] = ec.any(futures)
+
+ def find[T](futures: Traversable[Future[T]])(predicate: T => Boolean)(implicit ec: ExecutionContext): Future[Option[T]] = ec.find(futures)(predicate)
+
+}
+
+
+
+
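A composition sketch tying together the combinators defined above; the desugaring shown in the class documentation applies to the for-comprehension.

{{{
import scala.concurrent._

val f = future { 5 }
val g = future { 3 }

val sum  = for (x <- f; y <- g) yield x + y                 // flatMap + map
val safe = sum recover { case e: ArithmeticException => 0 } // failure -> value
safe onSuccess { case v => println("sum = " + v) }
}}}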
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
index c5fcde2d19..75e6299ad9 100644
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ b/src/library/scala/concurrent/FutureTaskRunner.scala
@@ -13,6 +13,7 @@ package scala.concurrent
*
* @author Philipp Haller
*/
+@deprecated("Use `ExecutionContext`s instead.", "2.10.0")
trait FutureTaskRunner extends TaskRunner {
/** The type of the futures that the underlying task runner supports.
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index db3c490882..127a0e0055 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -17,6 +17,7 @@ import java.util.concurrent.{ExecutorService, Executor}
*/
object JavaConversions {
+ @deprecated("Use `asExecutionContext` instead.", "2.10.0")
implicit def asTaskRunner(exec: ExecutorService): FutureTaskRunner =
new ThreadPoolRunner {
override protected def executor =
@@ -26,6 +27,7 @@ object JavaConversions {
exec.shutdown()
}
+ @deprecated("Use `asExecutionContext` instead.", "2.10.0")
implicit def asTaskRunner(exec: Executor): TaskRunner =
new TaskRunner {
type Task[T] = Runnable
@@ -46,4 +48,9 @@ object JavaConversions {
// do nothing
}
}
+
+ implicit def asExecutionContext(exec: ExecutorService): ExecutionContext = null // TODO
+
+ implicit def asExecutionContext(exec: Executor): ExecutionContext = null // TODO
+
}
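The two `asExecutionContext` conversions above are still stubs returning `null`; a hedged sketch of what the `ExecutorService` case could delegate to, reusing `impl.ExecutionContextImpl` from this patch.

{{{
import java.util.concurrent.{ Executors, ExecutorService }
import scala.concurrent.impl.ExecutionContextImpl

val pool: ExecutorService = Executors.newFixedThreadPool(4)
// ExecutionContextImpl matches on the executor it is given and
// forwards execute(runnable) to the underlying ExecutorService.
val ec = new ExecutionContextImpl(pool)
}}}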
diff --git a/src/library/scala/concurrent/ManagedBlocker.scala b/src/library/scala/concurrent/ManagedBlocker.scala
index 9c6f4d51d6..0b6d82e76f 100644
--- a/src/library/scala/concurrent/ManagedBlocker.scala
+++ b/src/library/scala/concurrent/ManagedBlocker.scala
@@ -12,6 +12,7 @@ package scala.concurrent
*
* @author Philipp Haller
*/
+@deprecated("Not used.", "2.10.0")
trait ManagedBlocker {
/**
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
new file mode 100644
index 0000000000..4404e90971
--- /dev/null
+++ b/src/library/scala/concurrent/Promise.scala
@@ -0,0 +1,132 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+import scala.util.{ Try, Success, Failure }
+
+
+
+
+/** Promise is an object which can be completed with a value or failed
+ * with an exception.
+ *
+ * @define promiseCompletion
+ * If the promise has already been fulfilled, failed or has timed out,
+ * calling this method will throw an IllegalStateException.
+ *
+ * @define allowedThrowables
+ * If the throwable used to fail this promise is an error, a control exception
+ * or an interrupted exception, it will be wrapped as a cause within an
+ * `ExecutionException` which will fail the promise.
+ *
+ * @define nonDeterministic
+ * Note: Using this method may result in non-deterministic concurrent programs.
+ */
+trait Promise[T] {
+
+ import nondeterministic._
+
+ /** Future containing the value of this promise.
+ */
+ def future: Future[T]
+
+ /** Completes the promise with either an exception or a value.
+ *
+ * @param result Either the value or the exception to complete the promise with.
+ *
+ * $promiseCompletion
+ */
+ def complete(result: Try[T]): this.type = if (tryComplete(result)) this else throwCompleted
+
+ /** Tries to complete the promise with either a value or the exception.
+ *
+ * $nonDeterministic
+ *
+ * @return `false` if the promise has already been completed, `true` otherwise.
+ */
+ def tryComplete(result: Try[T]): Boolean
+
+ /** Completes this promise with the specified future, once that future is completed.
+ *
+ * @return This promise
+ */
+ final def completeWith(other: Future[T]): this.type = {
+ other onComplete {
+ this complete _
+ }
+ this
+ }
+
+ /** Completes the promise with a value.
+ *
+ * @param value The value to complete the promise with.
+ *
+ * $promiseCompletion
+ */
+ def success(v: T): this.type = if (trySuccess(v)) this else throwCompleted
+
+ /** Tries to complete the promise with a value.
+ *
+ * $nonDeterministic
+ *
+ * @return `false` if the promise has already been completed, `true` otherwise.
+ */
+ def trySuccess(value: T): Boolean = tryComplete(Success(value))
+
+ /** Completes the promise with an exception.
+ *
+ * @param t The throwable to complete the promise with.
+ *
+ * $allowedThrowables
+ *
+ * $promiseCompletion
+ */
+ def failure(t: Throwable): this.type = if (tryFailure(t)) this else throwCompleted
+
+ /** Tries to complete the promise with an exception.
+ *
+ * $nonDeterministic
+ *
+ * @return `false` if the promise has already been completed, `true` otherwise.
+ */
+ def tryFailure(t: Throwable): Boolean = tryComplete(Failure(t))
+
+ /** Wraps a `Throwable` in an `ExecutionException` if necessary. TODO replace with `resolver` from scala.concurrent
+ *
+ * $allowedThrowables
+ */
+ protected def wrap(t: Throwable): Throwable = t match {
+ case t: Throwable if isFutureThrowable(t) => t
+ case _ => new ExecutionException(t)
+ }
+
+ private def throwCompleted = throw new IllegalStateException("Promise already completed.")
+
+}
+
+
+
+object Promise {
+
+ def kept[T](result: T)(implicit execctx: ExecutionContext): Promise[T] =
+ execctx keptPromise result
+
+ def broken[T](t: Throwable)(implicit execctx: ExecutionContext): Promise[T] =
+ execctx brokenPromise t
+
+}
+
+
+
+
+
+
+
+
+
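A producer/consumer sketch of the Promise contract above, using the package-level `executionContext`.

{{{
import scala.concurrent._
import scala.util.{ Success, Failure }

val p = executionContext.promise[String]
p.future onComplete {
  case Success(msg) => println("got: " + msg)
  case Failure(t)   => println("failed: " + t)
}
p success "done"      // fulfils the future exactly once
// p success "again"  // would throw IllegalStateException ($promiseCompletion)
}}}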
diff --git a/src/library/scala/concurrent/Scheduler.scala b/src/library/scala/concurrent/Scheduler.scala
new file mode 100644
index 0000000000..39d798e6b4
--- /dev/null
+++ b/src/library/scala/concurrent/Scheduler.scala
@@ -0,0 +1,54 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+import scala.util.Duration
+
+/** A service for scheduling tasks and thunks for one-time, or periodic execution.
+ */
+trait Scheduler {
+
+ /** Schedules a thunk for repeated execution with an initial delay and a frequency.
+ *
+ * @param delay the initial delay after which the thunk should be executed
+ * the first time
+ * @param frequency the frequency with which the thunk should be executed,
+ * as a time period between subsequent executions
+ */
+ def schedule(delay: Duration, frequency: Duration)(thunk: => Unit): Cancellable
+
+ /** Schedules a task for execution after a given delay.
+ *
+ * @param delay the duration after which the task should be executed
+ * @param task the task that is scheduled for execution
+ * @return a `Cancellable` that may be used to cancel the execution
+ * of the task
+ */
+ def scheduleOnce(delay: Duration, task: Runnable): Cancellable
+
+ /** Schedules a thunk for execution after a given delay.
+ *
+ * @param delay the duration after which the thunk should be executed
+ * @param thunk the thunk that is scheduled for execution
+ * @return a `Cancellable` that may be used to cancel the execution
+ * of the thunk
+ */
+ def scheduleOnce(delay: Duration)(thunk: => Unit): Cancellable
+
+}
+
+
+
+trait Cancellable {
+
+ /** Cancels the underlying task.
+ */
+ def cancel(): Unit
+
+}
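A usage sketch for the Scheduler contract; no concrete Scheduler is wired up in this patch (SchedulerImpl below is still `.disabled`), so the `scheduler` parameter is assumed.

{{{
import scala.concurrent.{ Scheduler, Cancellable }
import scala.util.Duration

def demo(scheduler: Scheduler): Cancellable = {
  // fire once after ~1 second; the returned handle can cancel it
  scheduler.scheduleOnce(Duration.fromNanos(1000000000L)) {
    println("fired once")
  }
}
}}}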
diff --git a/src/library/scala/concurrent/Task.scala b/src/library/scala/concurrent/Task.scala
new file mode 100644
index 0000000000..eb3efbb422
--- /dev/null
+++ b/src/library/scala/concurrent/Task.scala
@@ -0,0 +1,13 @@
+package scala.concurrent
+
+
+
+trait Task[+T] {
+
+ def start(): Unit
+
+ def future: Future[T]
+
+}
+
+
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index 64e62adfd3..500d79e07f 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -12,6 +12,7 @@ package scala.concurrent
*
* @author Philipp Haller
*/
+@deprecated("Use `ExecutionContext`s instead.", "2.10.0")
trait TaskRunner {
type Task[T]
diff --git a/src/library/scala/concurrent/TaskRunners.scala b/src/library/scala/concurrent/TaskRunners.scala
index 588073dc5e..7994255b25 100644
--- a/src/library/scala/concurrent/TaskRunners.scala
+++ b/src/library/scala/concurrent/TaskRunners.scala
@@ -14,6 +14,7 @@ import java.util.concurrent.{ThreadPoolExecutor, LinkedBlockingQueue, TimeUnit}
*
* @author Philipp Haller
*/
+@deprecated("Use `ExecutionContext`s instead.", "2.10.0")
object TaskRunners {
implicit val threadRunner: FutureTaskRunner =
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
index 27d8f2cc32..a3e0253634 100644
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ b/src/library/scala/concurrent/ThreadPoolRunner.scala
@@ -15,6 +15,7 @@ import java.util.concurrent.{ExecutorService, Callable, TimeUnit}
*
* @author Philipp Haller
*/
+@deprecated("Use `ExecutionContext`s instead.", "2.10.0")
trait ThreadPoolRunner extends FutureTaskRunner {
type Task[T] = Callable[T] with Runnable
diff --git a/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled b/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled
new file mode 100644
index 0000000000..745d2d1a15
--- /dev/null
+++ b/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled
@@ -0,0 +1,44 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+package default
+
+import scala.util.Duration
+
+private[concurrent] final class SchedulerImpl extends Scheduler {
+ private val timer =
+ new java.util.Timer(true) // the associated thread runs as a daemon
+
+ def schedule(delay: Duration, frequency: Duration)(thunk: => Unit): Cancellable = ???
+
+ def scheduleOnce(delay: Duration, task: Runnable): Cancellable = {
+ val timerTask = new java.util.TimerTask {
+ def run(): Unit =
+ task.run()
+ }
+ timer.schedule(timerTask, delay.toMillis)
+ new Cancellable {
+ def cancel(): Unit =
+ timerTask.cancel()
+ }
+ }
+
+ def scheduleOnce(delay: Duration)(task: => Unit): Cancellable = {
+ val timerTask = new java.util.TimerTask {
+ def run(): Unit =
+ task
+ }
+ timer.schedule(timerTask, delay.toMillis)
+ new Cancellable {
+ def cancel(): Unit =
+ timerTask.cancel()
+ }
+ }
+
+}
diff --git a/src/library/scala/concurrent/default/TaskImpl.scala.disabled b/src/library/scala/concurrent/default/TaskImpl.scala.disabled
new file mode 100644
index 0000000000..94e54cb372
--- /dev/null
+++ b/src/library/scala/concurrent/default/TaskImpl.scala.disabled
@@ -0,0 +1,313 @@
+package scala.concurrent
+package default
+
+
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater
+import scala.concurrent.forkjoin.{ ForkJoinPool, RecursiveAction, ForkJoinWorkerThread }
+import scala.util.Try
+import scala.util
+import scala.util.Duration
+import scala.annotation.tailrec
+
+
+
+private[concurrent] trait Completable[T] {
+self: Future[T] =>
+
+ val executor: ExecutionContextImpl
+
+ def newPromise[S]: Promise[S] = executor promise
+
+ type Callback = Try[T] => Any
+
+ def getState: State[T]
+
+ def casState(oldv: State[T], newv: State[T]): Boolean
+
+ protected def dispatch[U](r: Runnable) = executionContext execute r
+
+ protected def processCallbacks(cbs: List[Callback], r: Try[T]) =
+ for (cb <- cbs) dispatch(new Runnable {
+ override def run() = cb(r)
+ })
+
+ def future: Future[T] = self
+
+ def onComplete[U](callback: Try[T] => U): this.type = {
+ @tailrec def tryAddCallback(): Try[T] = {
+ getState match {
+ case p @ Pending(lst) =>
+ val pt = p.asInstanceOf[Pending[T]]
+ if (casState(pt, Pending(callback :: pt.callbacks))) null
+ else tryAddCallback()
+ case Success(res) => util.Success(res)
+ case Failure(t) => util.Failure(t)
+ }
+ }
+
+ val res = tryAddCallback()
+ if (res != null) dispatch(new Runnable {
+ override def run() =
+ try callback(res)
+ catch handledFutureException andThen {
+ t => Console.err.println(t)
+ }
+ })
+
+ this
+ }
+
+ def isTimedout: Boolean = getState match {
+ case Failure(ft: FutureTimeoutException) => true
+ case _ => false
+ }
+
+}
+
+private[concurrent] class PromiseImpl[T](context: ExecutionContextImpl)
+extends Promise[T] with Future[T] with Completable[T] {
+
+ val executor: scala.concurrent.default.ExecutionContextImpl = context
+
+ @volatile private var state: State[T] = _
+
+ val updater = AtomicReferenceFieldUpdater.newUpdater(classOf[PromiseImpl[T]], classOf[State[T]], "state")
+
+ updater.set(this, Pending(List()))
+
+ def casState(oldv: State[T], newv: State[T]): Boolean = {
+ updater.compareAndSet(this, oldv, newv)
+ }
+
+ def getState: State[T] = {
+ updater.get(this)
+ }
+
+ @tailrec private def tryCompleteState(completed: State[T]): List[Callback] = (getState: @unchecked) match {
+ case p @ Pending(cbs) => if (!casState(p, completed)) tryCompleteState(completed) else cbs
+ case _ => null
+ }
+
+ def tryComplete(r: Try[T]) = r match {
+ case util.Failure(t) => tryFailure(t)
+ case util.Success(v) => trySuccess(v)
+ }
+
+ override def trySuccess(value: T): Boolean = {
+ val cbs = tryCompleteState(Success(value))
+ if (cbs == null)
+ false
+ else {
+ processCallbacks(cbs, util.Success(value))
+ this.synchronized {
+ this.notifyAll()
+ }
+ true
+ }
+ }
+
+ override def tryFailure(t: Throwable): Boolean = {
+ val wrapped = wrap(t)
+ val cbs = tryCompleteState(Failure(wrapped))
+ if (cbs == null)
+ false
+ else {
+ processCallbacks(cbs, util.Failure(wrapped))
+ this.synchronized {
+ this.notifyAll()
+ }
+ true
+ }
+ }
+
+ def await(atMost: Duration)(implicit canawait: scala.concurrent.CanAwait): T = getState match {
+ case Success(res) => res
+ case Failure(t) => throw t
+ case _ =>
+ this.synchronized {
+ while (true)
+ getState match {
+ case Pending(_) => this.wait()
+ case Success(res) => return res
+ case Failure(t) => throw t
+ }
+ }
+ sys.error("unreachable")
+ }
+
+}
+
+private[concurrent] class TaskImpl[T](context: ExecutionContextImpl, body: => T)
+extends RecursiveAction with Task[T] with Future[T] with Completable[T] {
+
+ val executor: ExecutionContextImpl = context
+
+ @volatile private var state: State[T] = _
+
+ val updater = AtomicReferenceFieldUpdater.newUpdater(classOf[TaskImpl[T]], classOf[State[T]], "state")
+
+ updater.set(this, Pending(List()))
+
+ def casState(oldv: State[T], newv: State[T]): Boolean = {
+ updater.compareAndSet(this, oldv, newv)
+ }
+
+ def getState: State[T] = {
+ updater.get(this)
+ }
+
+ @tailrec private def tryCompleteState(completed: State[T]): List[Callback] = (getState: @unchecked) match {
+ case p @ Pending(cbs) => if (!casState(p, completed)) tryCompleteState(completed) else cbs
+ }
+
+ def compute(): Unit = {
+ var cbs: List[Callback] = null
+ try {
+ val res = body
+ processCallbacks(tryCompleteState(Success(res)), util.Success(res))
+ } catch {
+ case t if isFutureThrowable(t) =>
+ processCallbacks(tryCompleteState(Failure(t)), util.Failure(t))
+ case t =>
+ val ee = new ExecutionException(t)
+ processCallbacks(tryCompleteState(Failure(ee)), util.Failure(ee))
+ throw t
+ }
+ }
+
+ def start(): Unit = {
+ Thread.currentThread match {
+ case fj: ForkJoinWorkerThread if fj.getPool eq executor.pool => fork()
+ case _ => executor.pool.execute(this)
+ }
+ }
+
+ // TODO FIXME: handle timeouts
+ def await(atMost: Duration): this.type =
+ await
+
+ def await: this.type = {
+ this.join()
+ this
+ }
+
+ def tryCancel(): Unit =
+ tryUnfork()
+
+ def await(atMost: Duration)(implicit canawait: CanAwait): T = {
+ join() // TODO handle timeout also
+ (updater.get(this): @unchecked) match {
+ case Success(r) => r
+ case Failure(t) => throw t
+ }
+ }
+
+}
+
+
+private[concurrent] sealed abstract class State[T]
+
+
+case class Pending[T](callbacks: List[Try[T] => Any]) extends State[T]
+
+
+case class Success[T](result: T) extends State[T]
+
+
+case class Failure[T](throwable: Throwable) extends State[T]
+
+
+private[concurrent] final class ExecutionContextImpl extends ExecutionContext {
+ import ExecutionContextImpl._
+
+ val pool = {
+ val p = new ForkJoinPool
+ p.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler {
+ def uncaughtException(t: Thread, throwable: Throwable) {
+ Console.err.println(throwable.getMessage)
+ throwable.printStackTrace(Console.err)
+ }
+ })
+ p
+ }
+
+ @inline
+ private def executeTask(task: RecursiveAction) {
+ if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread])
+ task.fork()
+ else
+ pool execute task
+ }
+
+ def execute(task: Runnable) {
+ val action = new RecursiveAction { def compute() { task.run() } }
+ executeTask(action)
+ }
+
+ def execute[U](body: () => U) {
+ val action = new RecursiveAction { def compute() { body() } }
+ executeTask(action)
+ }
+
+ def task[T](body: => T): Task[T] = {
+ new TaskImpl(this, body)
+ }
+
+ def future[T](body: => T): Future[T] = {
+ val t = task(body)
+ t.start()
+ t.future
+ }
+
+ def promise[T]: Promise[T] =
+ new PromiseImpl[T](this)
+
+ def blocking[T](atMost: Duration)(body: =>T): T = blocking(body2awaitable(body), atMost)
+
+ def blocking[T](awaitable: Awaitable[T], atMost: Duration): T = {
+ currentExecutionContext.get match {
+ case null => awaitable.await(atMost)(null) // outside - TODO - fix timeout case
+ case x if x eq this => this.blockingCall(awaitable) // inside an execution context thread on this executor
+ case x => x.blocking(awaitable, atMost)
+ }
+ }
+
+ private def blockingCall[T](b: Awaitable[T]): T = b match {
+ case fj: TaskImpl[_] if fj.executor.pool eq pool =>
+ fj.await(Duration.fromNanos(0))
+ case _ =>
+ var res: T = null.asInstanceOf[T]
+ @volatile var blockingDone = false
+ // TODO add exception handling here!
+ val mb = new ForkJoinPool.ManagedBlocker {
+ def block() = {
+ res = b.await(Duration.fromNanos(0))(CanAwaitEvidence)
+ blockingDone = true
+ true
+ }
+ def isReleasable = blockingDone
+ }
+ ForkJoinPool.managedBlock(mb, true)
+ res
+ }
+
+ def reportFailure(t: Throwable): Unit = {}
+
+}
+
+
+object ExecutionContextImpl {
+
+ private[concurrent] def currentExecutionContext: ThreadLocal[ExecutionContext] = new ThreadLocal[ExecutionContext] {
+ override protected def initialValue = null
+ }
+
+}
+
+
+
+
+
+
+
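The completion logic in both PromiseImpl and TaskImpl above follows one lock-free pattern: CAS a Pending state holding callbacks into a terminal state, retrying on contention. A standalone sketch with a hypothetical `Cell`:

{{{
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec

class Cell[T] {
  // Left(callbacks) = pending, Right(value) = completed
  private val state = new AtomicReference[Either[List[T => Unit], T]](Left(Nil))

  @tailrec final def complete(v: T): Unit = state.get match {
    case p @ Left(cbs) =>
      if (state.compareAndSet(p, Right(v))) cbs foreach (_(v))  // winner runs callbacks
      else complete(v)                                          // lost the race: retry
    case Right(_) => ()                                         // already completed
  }
}
}}}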
diff --git a/src/library/scala/concurrent/impl/AbstractPromise.java b/src/library/scala/concurrent/impl/AbstractPromise.java
new file mode 100644
index 0000000000..5280d67854
--- /dev/null
+++ b/src/library/scala/concurrent/impl/AbstractPromise.java
@@ -0,0 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.impl;
+
+
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+
+
+abstract class AbstractPromise {
+ private volatile Object _ref = null;
+ protected final static AtomicReferenceFieldUpdater<AbstractPromise, Object> updater =
+ AtomicReferenceFieldUpdater.newUpdater(AbstractPromise.class, Object.class, "_ref");
+}
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
new file mode 100644
index 0000000000..7984aa02b7
--- /dev/null
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -0,0 +1,139 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.impl
+
+
+
+import java.util.concurrent.{Callable, ExecutorService}
+import scala.concurrent.forkjoin._
+import scala.concurrent.{ExecutionContext, resolver, Awaitable, body2awaitable}
+import scala.util.{ Duration, Try, Success, Failure }
+import scala.collection.mutable.Stack
+
+
+
+class ExecutionContextImpl(val executorService: AnyRef) extends ExecutionContext {
+ import ExecutionContextImpl._
+
+ def execute(runnable: Runnable): Unit = executorService match {
+ case fj: ForkJoinPool =>
+ if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread]) {
+ val fjtask = ForkJoinTask.adapt(runnable)
+ fjtask.fork
+ } else {
+ fj.execute(runnable)
+ }
+ case executorService: ExecutorService =>
+ executorService execute runnable
+ }
+
+ def execute[U](body: () => U): Unit = execute(new Runnable {
+ def run() = body()
+ })
+
+ def promise[T]: Promise[T] = new Promise.DefaultPromise[T]()(this)
+
+ def future[T](body: =>T): Future[T] = {
+ val p = promise[T]
+
+ dispatchFuture {
+ () =>
+ p complete {
+ try {
+ Success(body)
+ } catch {
+ case e => resolver(e)
+ }
+ }
+ }
+
+ p.future
+ }
+
+ def blocking[T](atMost: Duration)(body: =>T): T = blocking(body2awaitable(body), atMost)
+
+ def blocking[T](awaitable: Awaitable[T], atMost: Duration): T = {
+ currentExecutionContext.get match {
+ case null => awaitable.await(atMost)(null) // outside - TODO - fix timeout case
+ case x => x.blockingCall(awaitable) // inside an execution context thread
+ }
+ }
+
+ def reportFailure(t: Throwable) = t match {
+ case e: Error => throw e // rethrow serious errors
+ case t => t.printStackTrace()
+ }
+
+ /** Only callable from the tasks running on the same execution context. */
+ private def blockingCall[T](body: Awaitable[T]): T = {
+ releaseStack()
+
+ // TODO see what to do with timeout
+ body.await(Duration.fromNanos(0))(CanAwaitEvidence)
+ }
+
+ // an optimization for batching futures
+ // TODO we should replace this with a public queue,
+ // so that it can be stolen from
+ // OR: a push to the local task queue should be so cheap that this is
+ // not even needed, but stealing is still possible
+ private val _taskStack = new ThreadLocal[Stack[() => Unit]]()
+
+ private def releaseStack(): Unit =
+ _taskStack.get match {
+ case stack if (stack ne null) && stack.nonEmpty =>
+ val tasks = stack.elems
+ stack.clear()
+ _taskStack.remove()
+ dispatchFuture(() => _taskStack.get.elems = tasks, true)
+ case null =>
+ // do nothing - there is no local batching stack anymore
+ case _ =>
+ _taskStack.remove()
+ }
+
+ private[impl] def dispatchFuture(task: () => Unit, force: Boolean = false): Unit =
+ _taskStack.get match {
+ case stack if (stack ne null) && !force => stack push task
+ case _ => this.execute(
+ new Runnable {
+ def run() {
+ try {
+ val taskStack = Stack[() => Unit](task)
+ _taskStack set taskStack
+ while (taskStack.nonEmpty) {
+ val next = taskStack.pop()
+ try {
+ next.apply()
+ } catch {
+ case e =>
+ // TODO catching all and continue isn't good for OOME
+ reportFailure(e)
+ }
+ }
+ } finally {
+ _taskStack.remove()
+ }
+ }
+ }
+ )
+ }
+
+}
+
+
+object ExecutionContextImpl {
+
+ private[concurrent] def currentExecutionContext: ThreadLocal[ExecutionContextImpl] = new ThreadLocal[ExecutionContextImpl] {
+ override protected def initialValue = null
+ }
+
+}
+
+
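The `dispatchFuture`/`_taskStack` pair above is a trampoline: callbacks scheduled while another callback is running are pushed onto a thread-local stack and drained in place instead of paying for a pool submission each time. A stripped-down sketch:

{{{
import scala.collection.mutable.Stack

object Trampoline {
  private val local = new ThreadLocal[Stack[() => Unit]]

  def dispatch(task: () => Unit): Unit = local.get match {
    case s if s ne null => s push task          // already draining: batch it
    case _ =>
      val s = Stack(task)
      local set s
      try while (s.nonEmpty) s.pop().apply()    // drains pushes made meanwhile
      finally local.remove()
  }
}
}}}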
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
new file mode 100644
index 0000000000..9466761d4d
--- /dev/null
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -0,0 +1,89 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.impl
+
+import scala.concurrent.{Awaitable, ExecutionContext}
+import scala.util.{ Try, Success, Failure }
+//import scala.util.continuations._
+
+trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
+
+ implicit def executor: ExecutionContextImpl
+
+ /** For use only within a Future.flow block or another compatible Delimited Continuations reset block.
+ *
+ * Returns the result of this Future without blocking, by suspending execution and storing it as a
+ * continuation until the result is available.
+ */
+ //def apply(): T @cps[Future[Any]] = shift(this flatMap (_: T => Future[Any]))
+
+ /** Tests whether this Future has been completed.
+ */
+ final def isCompleted: Boolean = value.isDefined
+
+ /** The contained value of this Future. Before this Future is completed
+ * the value will be None. After completion the value will be Some(Right(t))
+ * if it contains a valid result, or Some(Left(error)) if it contains
+ * an exception.
+ */
+ def value: Option[Try[T]]
+
+ def onComplete[U](func: Try[T] => U): this.type
+
+ /** Creates a new Future[A] which is completed with this Future's result if
+ * that conforms to A's erased type or a ClassCastException otherwise.
+ */
+  final def mapTo[A](implicit m: Manifest[A]) = {
+    val p = executor.promise[A]
+
+    onComplete {
+      case f @ Failure(t) => p complete f.asInstanceOf[Try[A]]
+      case Success(v) =>
+        p complete (try {
+          Success(Future.boxedType(m.erasure).cast(v).asInstanceOf[A])
+        } catch {
+          case e: ClassCastException => Failure(e)
+        })
+    }
+
+    p.future
+  }
+
+ /** Used by for-comprehensions.
+ */
+ final def withFilter(p: T => Boolean) = new FutureWithFilter[T](this, p)
+
+ final class FutureWithFilter[+A](self: Future[A], p: A => Boolean) {
+ def foreach(f: A => Unit): Unit = self filter p foreach f
+ def map[B](f: A => B) = self filter p map f
+ def flatMap[B](f: A => Future[B]) = self filter p flatMap f
+ def withFilter(q: A => Boolean): FutureWithFilter[A] = new FutureWithFilter[A](self, x => p(x) && q(x))
+ }
+
+}
+
+object Future {
+ import java.{ lang => jl }
+
+ private val toBoxed = Map[Class[_], Class[_]](
+ classOf[Boolean] -> classOf[jl.Boolean],
+ classOf[Byte] -> classOf[jl.Byte],
+ classOf[Char] -> classOf[jl.Character],
+ classOf[Short] -> classOf[jl.Short],
+ classOf[Int] -> classOf[jl.Integer],
+ classOf[Long] -> classOf[jl.Long],
+ classOf[Float] -> classOf[jl.Float],
+ classOf[Double] -> classOf[jl.Double],
+ classOf[Unit] -> classOf[scala.runtime.BoxedUnit]
+ )
+
+ def boxedType(c: Class[_]): Class[_] = {
+ if (c.isPrimitive) toBoxed(c) else c
+ }
+}
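Why the `toBoxed` table above exists: `mapTo` casts through the erased class, and a primitive result is boxed at runtime, so the cast has to go through the boxed class.

{{{
import scala.concurrent.impl.Future

val c = Future.boxedType(classOf[Int])     // java.lang.Integer
val ok = c.cast(42.asInstanceOf[AnyRef])   // the boxed value casts fine
}}}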
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
new file mode 100644
index 0000000000..0087b71ea8
--- /dev/null
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -0,0 +1,258 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.impl
+
+
+
+import java.util.concurrent.TimeUnit.{ NANOSECONDS, MILLISECONDS }
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater
+import scala.concurrent.{Awaitable, ExecutionContext, resolve, resolver, blocking, CanAwait, TimeoutException}
+//import scala.util.continuations._
+import scala.util.Duration
+import scala.util.Try
+import scala.util
+import scala.annotation.tailrec
+//import scala.concurrent.NonDeterministic
+
+
+
+trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
+
+ def future = this
+
+ def newPromise[S]: Promise[S] = executor promise
+
+ // TODO refine answer and return types here from Any to type parameters
+ // then move this up in the hierarchy
+ /*
+ final def <<(value: T): Future[T] @cps[Future[Any]] = shift {
+ cont: (Future[T] => Future[Any]) =>
+ cont(complete(Right(value)))
+ }
+
+ final def <<(other: Future[T]): Future[T] @cps[Future[Any]] = shift {
+ cont: (Future[T] => Future[Any]) =>
+ val p = executor.promise[Any]
+ val thisPromise = this
+
+ thisPromise completeWith other
+ thisPromise onComplete { v =>
+ try {
+ p completeWith cont(thisPromise)
+ } catch {
+ case e => p complete resolver(e)
+ }
+ }
+
+ p.future
+ }
+ */
+ // TODO finish this once we introduce something like dataflow streams
+
+ /*
+ final def <<(stream: PromiseStreamOut[T]): Future[T] @cps[Future[Any]] = shift { cont: (Future[T] => Future[Any]) =>
+ val fr = executor.promise[Any]
+ val f = stream.dequeue(this)
+ f.onComplete { _ =>
+ try {
+ fr completeWith cont(f)
+ } catch {
+ case e =>
+ fr failure e
+ }
+ }
+ fr
+ }
+ */
+
+}
+
+
+object Promise {
+ def dur2long(dur: Duration): Long = if (dur.isFinite) dur.toNanos else Long.MaxValue
+
+ def EmptyPending[T](): FState[T] = emptyPendingValue.asInstanceOf[FState[T]]
+
+ /** Represents the internal state.
+ *
+ * [adriaan] it's unsound to make FState covariant (tryComplete won't type check)
+ */
+ sealed trait FState[T] { def value: Option[Try[T]] }
+
+ case class Pending[T](listeners: List[Try[T] => Any] = Nil) extends FState[T] {
+ def value: Option[Try[T]] = None
+ }
+
+ case class Success[T](value: Option[util.Success[T]] = None) extends FState[T] {
+ def result: T = value.get.get
+ }
+
+ case class Failure[T](value: Option[util.Failure[T]] = None) extends FState[T] {
+ def exception: Throwable = value.get.exception
+ }
+
+ private val emptyPendingValue = Pending[Nothing](Nil)
+
+ /** Default promise implementation.
+ */
+ class DefaultPromise[T](implicit val executor: ExecutionContextImpl) extends AbstractPromise with Promise[T] {
+ self =>
+
+ updater.set(this, Promise.EmptyPending())
+
+ protected final def tryAwait(atMost: Duration): Boolean = {
+ @tailrec
+ def awaitUnsafe(waitTimeNanos: Long): Boolean = {
+ if (value.isEmpty && waitTimeNanos > 0) {
+ val ms = NANOSECONDS.toMillis(waitTimeNanos)
+ val ns = (waitTimeNanos % 1000000l).toInt // as per object.wait spec
+ val start = System.nanoTime()
+ try {
+ synchronized {
+ while (value.isEmpty) wait(ms, ns)
+ }
+ } catch {
+ case e: InterruptedException =>
+ }
+
+ awaitUnsafe(waitTimeNanos - (System.nanoTime() - start))
+ } else
+ value.isDefined
+ }
+
+ executor.blocking(concurrent.body2awaitable(awaitUnsafe(dur2long(atMost))), Duration.fromNanos(0))
+ }
+
+ private def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
+ if (value.isDefined || tryAwait(atMost)) this
+ else throw new TimeoutException("Futures timed out after [" + atMost.toMillis + "] milliseconds")
+
+ def await(atMost: Duration)(implicit permit: CanAwait): T =
+ ready(atMost).value.get match {
+ case util.Failure(e) => throw e
+ case util.Success(r) => r
+ }
+
+ def value: Option[Try[T]] = getState.value
+
+ @inline
+ private[this] final def updater = AbstractPromise.updater.asInstanceOf[AtomicReferenceFieldUpdater[AbstractPromise, FState[T]]]
+
+ @inline
+ protected final def updateState(oldState: FState[T], newState: FState[T]): Boolean = updater.compareAndSet(this, oldState, newState)
+
+ @inline
+ protected final def getState: FState[T] = updater.get(this)
+
+ def tryComplete(value: Try[T]): Boolean = {
+ val callbacks: List[Try[T] => Any] = {
+ try {
+ @tailrec
+ def tryComplete(v: Try[T]): List[Try[T] => Any] = {
+ getState match {
+ case cur @ Pending(listeners) =>
+ val newState =
+ if (v.isFailure) Failure(Some(v.asInstanceOf[util.Failure[T]]))
+ else Success(Some(v.asInstanceOf[util.Success[T]]))
+
+ if (updateState(cur, newState)) listeners
+ else tryComplete(v)
+ case _ => null
+ }
+ }
+ tryComplete(resolve(value))
+ } finally {
+ synchronized { notifyAll() } // notify any blockers from `tryAwait`
+ }
+ }
+
+ callbacks match {
+ case null => false
+ case cs if cs.isEmpty => true
+ case cs =>
+ executor dispatchFuture {
+ () => cs.foreach(f => notifyCompleted(f, value))
+ }
+ true
+ }
+ }
+
+ def onComplete[U](func: Try[T] => U): this.type = {
+ @tailrec // Returns whether the future has already been completed or not
+ def tryAddCallback(): Boolean = {
+ val cur = getState
+ cur match {
+ case _: Success[_] | _: Failure[_] => true
+ case p: Pending[_] =>
+ val pt = p.asInstanceOf[Pending[T]]
+ if (updateState(pt, pt.copy(listeners = func :: pt.listeners))) false else tryAddCallback()
+ }
+ }
+
+ if (tryAddCallback()) {
+ val result = value.get
+ executor dispatchFuture {
+ () => notifyCompleted(func, result)
+ }
+ }
+
+ this
+ }
+
+ private final def notifyCompleted(func: Try[T] => Any, result: Try[T]) {
+ try {
+ func(result)
+ } catch {
+ case e => executor.reportFailure(e)
+ }
+ }
+ }
+
+ /** An already completed Future is given its result at creation.
+ *
+ * Useful in Future-composition when a value to contribute is already available.
+ */
+ final class KeptPromise[T](suppliedValue: Try[T])(implicit val executor: ExecutionContextImpl) extends Promise[T] {
+ val value = Some(resolve(suppliedValue))
+
+ def tryComplete(value: Try[T]): Boolean = false
+
+ def onComplete[U](func: Try[T] => U): this.type = {
+ val completedAs = value.get
+ executor dispatchFuture {
+ () => func(completedAs)
+ }
+ this
+ }
+
+ private def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
+
+ def await(atMost: Duration)(implicit permit: CanAwait): T = value.get match {
+ case util.Failure(e) => throw e
+ case util.Success(r) => r
+ }
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
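The Pending/Success/Failure values above form a small lock-free state machine: completion is a single compare-and-set from Pending to a terminal state, and listeners registered while Pending are dispatched exactly once. A minimal self-contained sketch of the same pattern, using a plain AtomicReference and Either in place of the AbstractPromise field updater and Try (an illustration of the technique, not the implementation above):

{{{
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec

final class MiniPromise[T] {
  private sealed trait State
  private case class Pending(listeners: List[Either[Throwable, T] => Any]) extends State
  private case class Done(result: Either[Throwable, T]) extends State

  private val state = new AtomicReference[State](Pending(Nil))

  // Exactly-once completion: only the CAS that flips Pending -> Done wins.
  @tailrec final def tryComplete(result: Either[Throwable, T]): Boolean =
    state.get match {
      case p @ Pending(listeners) =>
        if (state.compareAndSet(p, Done(result))) { listeners foreach (_(result)); true }
        else tryComplete(result) // lost a race with another completer; retry
      case _: Done => false      // already completed
    }

  // Run the callback immediately if completed, otherwise enqueue it via CAS.
  @tailrec final def onComplete(f: Either[Throwable, T] => Any): Unit =
    state.get match {
      case Done(r) => f(r)
      case p @ Pending(ls) =>
        if (!state.compareAndSet(p, Pending(f :: ls))) onComplete(f)
    }
}
}}}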
diff --git a/src/library/scala/concurrent/ops.scala b/src/library/scala/concurrent/ops.scala
index 92220a8313..2cea29aefe 100644
--- a/src/library/scala/concurrent/ops.scala
+++ b/src/library/scala/concurrent/ops.scala
@@ -15,6 +15,7 @@ import scala.util.control.Exception.allCatch
*
* @author Martin Odersky, Stepan Koltsov, Philipp Haller
*/
+@deprecated("Use `future` instead.", "2.10.0")
object ops
{
val defaultRunner: FutureTaskRunner = TaskRunners.threadRunner
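For callers migrating off `ops`, the replacement named in the deprecation message is the `future` primitive exposed through the new scala.concurrent package object (added below via ConcurrentPackageObject). A hedged migration sketch, since `future`'s exact signature is not part of this diff:

{{{
import scala.concurrent._

// before (deprecated): scala.concurrent.ops.future { 41 + 1 }
// after, assuming ConcurrentPackageObject supplies a `future` method
// that runs its body on the implicit execution context:
val f: Future[Int] = future { 41 + 1 }
}}}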
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
new file mode 100644
index 0000000000..7cc48c09b2
--- /dev/null
+++ b/src/library/scala/concurrent/package.scala
@@ -0,0 +1,57 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+import scala.util.{ Duration, Try, Success, Failure }
+
+/** This package object contains primitives for concurrent and parallel programming.
+ */
+package object concurrent extends scala.concurrent.ConcurrentPackageObject {
+ type ExecutionException = java.util.concurrent.ExecutionException
+ type CancellationException = java.util.concurrent.CancellationException
+ type TimeoutException = java.util.concurrent.TimeoutException
+}
+
+package concurrent {
+ object await {
+ def ready[T](atMost: Duration)(awaitable: Awaitable[T])(implicit execCtx: ExecutionContext = executionContext): Awaitable[T] = {
+ try blocking(awaitable, atMost)
+ catch { case _ => }
+ awaitable
+ }
+
+ def result[T](atMost: Duration)(awaitable: Awaitable[T])(implicit execCtx: ExecutionContext = executionContext): T = {
+ blocking(awaitable, atMost)
+ }
+ }
+
+ /** Importing this object allows using some concurrency primitives
+ * on futures and promises that can yield nondeterministic programs.
+ *
+ * While program determinism is broken when using these primitives,
+ * some programs cannot be written without them (e.g. multiple client threads
+ * cannot send requests to a server thread through regular promises and futures).
+ */
+ object nondeterministic { }
+
+ /** A timeout exception.
+ *
+ * Futures are failed with a timeout exception when their timeout expires.
+ *
+ * Each timeout exception contains an origin future which originally timed out.
+ */
+ class FutureTimeoutException(origin: Future[_], message: String) extends TimeoutException(message) {
+ def this(origin: Future[_]) = this(origin, "Future timed out.")
+ }
+
+ final class DurationOps private[concurrent] (x: Int) {
+ // TODO ADD OTHERS
+ def ns = util.Duration.fromNanos(x)
+ }
+}
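A rough usage sketch of the `await` object defined above; the future-producing call is a placeholder, and `Duration.fromNanos` is used because it is the only constructor that already appears in this patch:

{{{
import scala.concurrent._
import scala.util.Duration

def slowComputation: Future[String] = sys.error("placeholder")

val oneSecond = Duration.fromNanos(1000000000L)

// Block for at most one second; exceptions are swallowed and the
// awaitable itself is returned:
await.ready(oneSecond)(slowComputation)

// Block and extract the result; throws if the future failed or timed out:
val s: String = await.result(oneSecond)(slowComputation)
}}}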
diff --git a/src/library/scala/concurrent/package.scala.disabled b/src/library/scala/concurrent/package.scala.disabled
deleted file mode 100644
index 42b4bf954c..0000000000
--- a/src/library/scala/concurrent/package.scala.disabled
+++ /dev/null
@@ -1,108 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-
-
-
-
-/** This package object contains primitives for parallel programming.
- */
-package object concurrent {
-
- /** Performs a call which can potentially block execution.
- *
- * Example:
- * {{{
- * val lock = new ReentrantLock
- *
- * // ... do something ...
- *
- * blocking {
- * if (!lock.hasLock) lock.lock()
- * }
- * }}}
- *
- * '''Note:''' calling methods that wait arbitrary amounts of time
- * (e.g. for I/O operations or locks) may severely decrease performance
- * or even result in deadlocks. This does not include waiting for
- * results of futures.
- *
- * @tparam T the result type of the blocking operation
- * @param body the blocking operation
- * @param runner the runner used for parallel computations
- * @return the result of the potentially blocking operation
- */
- def blocking[T](body: =>T)(implicit runner: TaskRunner): T = {
- null.asInstanceOf[T]
- }
-
- /** Invokes a computation asynchronously. Does not wait for the computation
- * to finish.
- *
- * @tparam U the result type of the operation
- * @param p the computation to be invoked asynchronously
- * @param runner the runner used for parallel computations
- */
- def spawn[U](p: =>U)(implicit runner: TaskRunner): Unit = {
- }
-
- /** Starts 2 parallel computations and returns once they are completed.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param runner the runner used for parallel computations
- * @return a tuple of results corresponding to parallel computations
- */
- def par[T1, T2](b1: =>T1)(b2: =>T2)(implicit runner: TaskRunner): (T1, T2) = {
- null
- }
-
- /** Starts 3 parallel computations and returns once they are completed.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @tparam T3 the type of the result of 3rd the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param b3 the 3rd computation to be invoked in parallel
- * @param runner the runner used for parallel computations
- * @return a tuple of results corresponding to parallel computations
- */
- def par[T1, T2, T3](b1: =>T1)(b2: =>T2)(b3: =>T3)(implicit runner: TaskRunner): (T1, T2, T3) = {
- null
- }
-
- /** Starts 4 parallel computations and returns once they are completed.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @tparam T3 the type of the result of 3rd the parallel computation
- * @tparam T4 the type of the result of 4th the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param b3 the 3rd computation to be invoked in parallel
- * @param b4 the 4th computation to be invoked in parallel
- * @param runner the runner used for parallel computations
- * @return a tuple of results corresponding to parallel computations
- */
- def par[T1, T2, T3, T4](b1: =>T1)(b2: =>T2)(b3: =>T3)(b4: =>T4)(implicit runner: TaskRunner): (T1, T2, T3, T4) = {
- null
- }
-
-}
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 0c5d10b15e..366af34ee9 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -27,6 +27,12 @@ package object scala {
type NoSuchElementException = java.util.NoSuchElementException
type NumberFormatException = java.lang.NumberFormatException
type AbstractMethodError = java.lang.AbstractMethodError
+ type InterruptedException = java.lang.InterruptedException
+
+ // A dummy used by the specialization annotation.
+ val AnyRef = new Specializable {
+ override def toString = "object AnyRef"
+ }
@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
type serializable = annotation.serializable
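The `AnyRef` dummy exists so that `AnyRef` can be named in specialization lists, which the updated `AbstractFunction1` later in this patch relies on. A sketch of what this permits in user code (assuming the 2.10-style `@specialized` syntax):

{{{
// Specialized variants are generated for Int and for reference types:
def first[@specialized(Int, AnyRef) T](xs: Array[T]): T = xs(0)
}}}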
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
index 466b57dea7..d393ac47fa 100644
--- a/src/library/scala/reflect/ClassManifest.scala
+++ b/src/library/scala/reflect/ClassManifest.scala
@@ -46,7 +46,9 @@ trait ClassManifest[T] extends OptManifest[T] with Equals with Serializable {
/** The Scala type described by this manifest.
*/
- lazy val tpe: mirror.Type = reflect.mirror.classToType(erasure)
+ lazy val tpe: mirror.Type = mirror.classToType(erasure)
+
+ def symbol: mirror.Symbol = mirror.classToSymbol(erasure)
private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = {
def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = {
diff --git a/src/library/scala/reflect/Code.scala b/src/library/scala/reflect/Code.scala
deleted file mode 100644
index 52705d302c..0000000000
--- a/src/library/scala/reflect/Code.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.reflect
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-class Code[T: Manifest](val tree: scala.reflect.mirror.Tree) {
- val manifest = implicitly[Manifest[T]]
- override def toString = "Code(tree = "+tree+", manifest = "+manifest+")"
-}
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-object Code {
- def lift[A](tree: A): Code[A] =
- throw new Error("Code was not lifted by compiler")
-}
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 8bd45c0e33..e5df487be9 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -76,6 +76,8 @@ abstract class AnyValManifest[T <: AnyVal](override val toString: String) extend
* in client code.
*/
object Manifest {
+ import mirror.{ definitions => mdefs }
+
def valueManifests: List[AnyValManifest[_]] =
List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit)
@@ -152,34 +154,40 @@ object Manifest {
}
val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any]("Any") {
+ override def symbol = mdefs.AnyClass
override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
private def readResolve(): Any = Manifest.Any
}
val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object]("Object") {
+ override def symbol = mdefs.ObjectClass
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
private def readResolve(): Any = Manifest.Object
}
val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal]("AnyVal") {
+ override def symbol = mdefs.AnyValClass
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
private def readResolve(): Any = Manifest.AnyVal
}
val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null]("Null") {
+ override def symbol = mdefs.NullClass
override def <:<(that: ClassManifest[_]): Boolean =
(that ne null) && (that ne Nothing) && !(that <:< AnyVal)
private def readResolve(): Any = Manifest.Null
}
val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing]("Nothing") {
+ override def symbol = mdefs.NothingClass
override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
private def readResolve(): Any = Manifest.Nothing
}
private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] {
lazy val erasure = value.getClass
- override lazy val tpe = mirror.SingleType(mirror.NoPrefix, InstanceRefSymbol(value)) // todo: change to freevar
+ override lazy val symbol = InstanceRefSymbol(value) // todo: change to freevar
+ override lazy val tpe = mirror.SingleType(mirror.NoPrefix, symbol)
override lazy val toString = value.toString + ".type"
}
@@ -208,8 +216,12 @@ object Manifest {
def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
+ /** Phantom types have no runtime representation; they all erase to Object,
+ * but the Symbol preserves their identity.
+ */
private abstract class PhantomManifest[T](override val toString: String) extends ClassTypeManifest[T](None, classOf[java.lang.Object], Nil) {
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override lazy val tpe = namedType(mirror.NoPrefix, symbol, Nil)
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
override val hashCode = System.identityHashCode(this)
}
@@ -218,13 +230,13 @@ object Manifest {
private class ClassTypeManifest[T](prefix: Option[Manifest[_]],
val erasure: Predef.Class[_],
override val typeArguments: List[Manifest[_]]) extends Manifest[T] {
+
override lazy val tpe = {
- val clazz = classToSymbol(erasure)
val pre = prefix match {
case Some(pm) => pm.tpe
- case None => clazz.owner.thisType
+ case None => symbol.owner.thisPrefix
}
- namedType(pre, clazz, typeArguments map (_.tpe))
+ namedType(pre, symbol, typeArguments map (_.tpe))
}
override def toString =
@@ -282,8 +294,9 @@ object Manifest {
* instead of in scala-compiler.jar.
*/
def apply[T](_tpe: mirror.Type): Manifest[T] = new Manifest[T] {
+ override def symbol = _tpe.typeSymbol
override lazy val tpe = _tpe
- override def erasure = mirror.typeToClass(_tpe.erasedType)
+ override def erasure = mirror.typeToClass(_tpe.erasedType)
override def toString = _tpe.toString
}
}
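The phantom manifests above all share the erasure classOf[java.lang.Object], so only identity (and now the attached Symbol) tells them apart; a sketch:

{{{
// All phantom manifests erase to java.lang.Object, yet remain distinct:
Manifest.Any.erasure == Manifest.AnyVal.erasure // true: both classOf[Object]
Manifest.Any == Manifest.AnyVal                 // false: identity-based equals
Manifest.Any.symbol                             // AnyClass, preserving identity
}}}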
diff --git a/src/library/scala/reflect/ReflectionUtils.scala b/src/library/scala/reflect/ReflectionUtils.scala
index dfadfb4976..510f0819c6 100644
--- a/src/library/scala/reflect/ReflectionUtils.scala
+++ b/src/library/scala/reflect/ReflectionUtils.scala
@@ -29,13 +29,13 @@ object ReflectionUtils {
def singletonInstance(className: String, cl: ClassLoader = getClass.getClassLoader): AnyRef = {
val name = if (className endsWith "$") className else className + "$"
- val clazz = java.lang.Class.forName(name, true, cl)
+ val clazz = java.lang.Class.forName(name, true, cl)
val singleton = clazz getField "MODULE$" get null
singleton
}
// Retrieves the MODULE$ field for the given class name.
- def singletonInstanceOpt(className: String, cl: ClassLoader = getClass.getClassLoader): Option[AnyRef] =
+ def singletonInstanceOpt(className: String, cl: ClassLoader = getClass.getClassLoader): Option[AnyRef] =
try Some(singletonInstance(className, cl))
catch { case _: ClassNotFoundException => None }
}
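A short usage sketch of the helpers above, which read the `MODULE$` field the compiler generates for every object:

{{{
import scala.reflect.ReflectionUtils._

// The trailing "$" of the module class name is appended when missing;
// None is returned if the class cannot be found:
val predef: Option[AnyRef] = singletonInstanceOpt("scala.Predef")
}}}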
diff --git a/src/library/scala/reflect/api/Mirror.scala b/src/library/scala/reflect/api/Mirror.scala
index 136f52b05f..cea9e1a37d 100644
--- a/src/library/scala/reflect/api/Mirror.scala
+++ b/src/library/scala/reflect/api/Mirror.scala
@@ -3,57 +3,59 @@ package api
/** A mirror establishes connections of
* runtime entities such as class names and object instances
- * with a refexive universe.
+ * with a reflexive universe.
*/
trait Mirror extends Universe with RuntimeTypes with TreeBuildUtil {
/** The Scala class symbol that has given fully qualified name
* @param name The fully qualified name of the class to be returned
- * @throws java.lang.ClassNotFoundException if no class wiht that name exists
+ * @throws java.lang.ClassNotFoundException if no class with that name exists
* to do: throws anything else?
*/
- def classWithName(name: String): Symbol
-
- /** Return a reference to the companion object of this class symbol
+ def symbolForName(name: String): Symbol
+
+ /** Return a reference to the companion object of the given class symbol.
*/
- def getCompanionObject(clazz: Symbol): AnyRef
-
- /** The Scala class symbol corresponding to the runtime class of given object
- * @param The object from which the class is returned
+ def companionInstance(clazz: Symbol): AnyRef
+
+ /** The Scala class symbol corresponding to the runtime class of the given instance.
+ * @param instance The instance
+ * @return The class Symbol for the instance
* @throws ?
*/
- def getClass(obj: AnyRef): Symbol
+ def symbolOfInstance(instance: Any): Symbol
- /** The Scala type corresponding to the runtime type of given object.
+ /** The Scala type corresponding to the runtime type of given instance.
* If the underlying class is parameterized, this will be an existential type,
* with unknown type arguments.
*
- * @param The object from which the type is returned
+ * @param instance The instance.
+ * @return The Type of the given instance.
* @throws ?
*/
- def getType(obj: AnyRef): Type
+ def typeOfInstance(instance: Any): Type
/** The value of a field on a receiver instance.
* @param receiver The receiver instance
* @param field The field
* @return The value contained in `receiver.field`.
*/
- def getValue(receiver: AnyRef, field: Symbol): Any
+ def getValueOfField(receiver: AnyRef, field: Symbol): Any
/** Sets the value of a field on a receiver instance.
* @param receiver The receiver instance
* @param field The field
* @param value The new value to be stored in the field.
*/
- def setValue(receiver: AnyRef, field: Symbol, value: Any): Unit
+ def setValueOfField(receiver: AnyRef, field: Symbol, value: Any): Unit
- /** Invokes a method on a reciver instance with some arguments
+ /** Invokes a method on a receiver instance with some arguments
* @param receiver The receiver instance
* @param meth The method
* @param args The method call's arguments
* @return The result of invoking `receiver.meth(args)`
*/
- def invoke(receiver: AnyRef, meth: Symbol, args: Any*): Any
+ def invoke(receiver: AnyRef, meth: Symbol)(args: Any*): Any
/** Maps a Java class to a Scala type reference
* @param clazz The Java class object
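Taken together, the renamed operations support reflective access along these lines (a sketch only: `mirror` stands for a concrete Mirror implementation such as scala.reflect.mirror, and the string literals are illustrative):

{{{
val listClass = mirror.symbolForName("scala.collection.immutable.List")
val listObj   = mirror.companionInstance(listClass)
val strType   = mirror.typeOfInstance("abc") // existential if the class is parameterized
}}}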
diff --git a/src/library/scala/reflect/api/Modifier.scala b/src/library/scala/reflect/api/Modifier.scala
index 8569b103cf..cbfe91e59b 100644
--- a/src/library/scala/reflect/api/Modifier.scala
+++ b/src/library/scala/reflect/api/Modifier.scala
@@ -1,11 +1,82 @@
package scala.reflect.api
-object Modifier extends Enumeration {
+import collection.{ immutable, mutable }
- val `protected`, `private`, `override`, `abstract`, `final`,
- `sealed`, `implicit`, `lazy`, `macro`, `case`, `trait`,
- deferred, interface, mutable, parameter, covariant, contravariant,
- preSuper, abstractOverride, local, java, static, caseAccessor,
- defaultParameter, defaultInit, paramAccessor, bynameParameter = Value
+sealed abstract class Modifier {
+ def name: String
+ def isKeyword: Boolean
+ def sourceString: String = if (isKeyword) "`" + name + "`" else name
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = name.hashCode
+ override def toString = name
+}
+final class SymbolModifier private (val name: String, val isKeyword: Boolean) extends Modifier {
+ def this(name: String) = this(name, false)
+}
+final class SourceModifier private (val name: String) extends Modifier {
+ def isKeyword = true
+}
+
+object SymbolModifier {
+ private val seen = mutable.ListBuffer[SymbolModifier]()
+ private[api] def apply(name: String): SymbolModifier = {
+ val mod = name match {
+ case "case" | "trait" => new SymbolModifier(name, isKeyword = true)
+ case _ => new SymbolModifier(name)
+ }
+ seen += mod
+ mod
+ }
+ private[api] def all = seen.toList
+}
+object SourceModifier {
+ private val seen = mutable.ListBuffer[SourceModifier]()
+ private[api] def apply(name: String): SourceModifier = {
+ val mod = new SourceModifier(name)
+ seen += mod
+ mod
+ }
+ private[api] def all = seen.toList
+}
+
+object Modifier extends immutable.Set[Modifier] {
+ val `abstract` = SourceModifier("abstract")
+ val `final` = SourceModifier("final")
+ val `implicit` = SourceModifier("implicit")
+ val `lazy` = SourceModifier("lazy")
+ val `macro` = SourceModifier("macro")
+ val `override` = SourceModifier("override")
+ val `private` = SourceModifier("private")
+ val `protected` = SourceModifier("protected")
+ val `sealed` = SourceModifier("sealed")
+
+ val `case` = SymbolModifier("case")
+ val `trait` = SymbolModifier("trait")
+ val abstractOverride = SymbolModifier("abstractOverride")
+ val bynameParameter = SymbolModifier("bynameParameter")
+ val caseAccessor = SymbolModifier("caseAccessor")
+ val contravariant = SymbolModifier("contravariant")
+ val covariant = SymbolModifier("covariant")
+ val defaultInit = SymbolModifier("defaultInit")
+ val defaultParameter = SymbolModifier("defaultParameter")
+ val deferred = SymbolModifier("deferred")
+ val interface = SymbolModifier("interface")
+ val java = SymbolModifier("java")
+ val local = SymbolModifier("local")
+ val mutable = SymbolModifier("mutable")
+ val paramAccessor = SymbolModifier("paramAccessor")
+ val parameter = SymbolModifier("parameter")
+ val preSuper = SymbolModifier("preSuper")
+ val static = SymbolModifier("static")
+
+ val sourceModifiers: Set[SourceModifier] = SourceModifier.all.toSet
+ val symbolModifiers: Set[SymbolModifier] = SymbolModifier.all.toSet
+ val allModifiers: Set[Modifier] = sourceModifiers ++ symbolModifiers
+ def values = allModifiers
+
+ def contains(key: Modifier) = allModifiers(key)
+ def iterator = allModifiers.iterator
+ def -(elem: Modifier) = allModifiers - elem
+ def +(elem: Modifier) = allModifiers + elem
}
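Because `Modifier` is itself an immutable.Set[Modifier], the modifier universe can be queried directly; for example:

{{{
import scala.reflect.api.Modifier

object ModifierDemo extends App {
  println(Modifier.`lazy`.sourceString)      // `lazy`  (keywords are back-quoted)
  println(Modifier.mutable.sourceString)     // mutable (plain symbol modifier)
  println(Modifier contains Modifier.`case`) // true
}
}}}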
diff --git a/src/library/scala/reflect/api/Names.scala b/src/library/scala/reflect/api/Names.scala
index 9498f0af36..c72774dfc7 100755
--- a/src/library/scala/reflect/api/Names.scala
+++ b/src/library/scala/reflect/api/Names.scala
@@ -6,12 +6,11 @@ package api
* The same string can be a name in both universes.
* Two names are equal if they represent the same string and they are
* members of the same universe.
- *
+ *
 * Names are interned. That is, for two names `name1` and `name2`,
* `name1 == name2` implies `name1 eq name2`.
*/
trait Names {
-
/** The abstract type of names */
type Name >: Null <: AbsName
@@ -37,12 +36,20 @@ trait Names {
/** Replaces all occurrences of $op_names in this name by corresponding operator symbols.
* Example: `foo_+=` becomes `foo_$plus$eq`.
*/
- def decode: String
+ def decoded: String
/** Replaces all occurrences of operator symbols in this name by corresponding $op_names.
* Example: `foo_$plus$eq` becomes `foo_+=`
*/
- def encode: Name
+ def encoded: String
+
+ /** The decoded name, still represented as a name.
+ */
+ def decodedName: Name
+
+ /** The encoded name, still represented as a name.
+ */
+ def encodedName: Name
}
/** Create a new term name.
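The split into String-returning `decoded`/`encoded` and Name-returning `decodedName`/`encodedName` behaves as follows on an operator name (a sketch to be read inside some concrete Universe, where `newTermName` is the factory declared here):

{{{
val n = newTermName("foo_+=")
n.encoded             // "foo_$plus$eq"  (a String)
n.encodedName         // foo_$plus$eq    (still a Name)
n.encodedName.decoded // "foo_+="        (round-trips, per the docs above)
}}}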
diff --git a/src/library/scala/reflect/api/StandardDefinitions.scala b/src/library/scala/reflect/api/StandardDefinitions.scala
index 3526cf259d..e737b0ea4f 100755
--- a/src/library/scala/reflect/api/StandardDefinitions.scala
+++ b/src/library/scala/reflect/api/StandardDefinitions.scala
@@ -11,14 +11,11 @@ trait StandardDefinitions { self: Universe =>
val definitions: AbsDefinitions
abstract class AbsDefinitions {
- // outer packages and their classes
- def RootPackage: Symbol // under consideration
+ // packages
+ def RootPackage: Symbol
def RootClass: Symbol
def EmptyPackage: Symbol
- def EmptyPackageClass: Symbol
-
def ScalaPackage: Symbol
- def ScalaPackageClass: Symbol
// top types
def AnyClass : Symbol
@@ -54,17 +51,19 @@ trait StandardDefinitions { self: Universe =>
// fundamental modules
def PredefModule: Symbol
- // fundamental type constructions
- def ClassType(arg: Type): Type
+ /** Given a type `arg`, returns the type corresponding to the VM's
+ * representation: ClassClass's type constructor applied to `arg`.
+ */
+ def vmClassType(arg: Type): Type // !!! better name?
/** The string representation used by the given type in the VM.
*/
- def signature(tp: Type): String
+ def vmSignature(sym: Symbol, info: Type): String
/** Is symbol one of the value classes? */
- def isValueClass(sym: Symbol): Boolean
+ def isValueClass(sym: Symbol): Boolean // !!! better name?
/** Is symbol one of the numeric value classes? */
- def isNumericValueClass(sym: Symbol): Boolean
+ def isNumericValueClass(sym: Symbol): Boolean // !!! better name?
}
}
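A hedged sketch of the renamed VM-level helpers above, assuming a concrete universe in scope:

{{{
// The VM's representation of Int values at the Class level:
val clsTpe = definitions.vmClassType(definitions.IntClass.asType)
}}}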
diff --git a/src/library/scala/reflect/api/StandardNames.scala b/src/library/scala/reflect/api/StandardNames.scala
new file mode 100644
index 0000000000..81517d2a6b
--- /dev/null
+++ b/src/library/scala/reflect/api/StandardNames.scala
@@ -0,0 +1,21 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package api
+
+trait StandardNames { self: Universe =>
+
+ val nme: AbsTermNames
+
+ abstract class AbsTermNames {
+ val CONSTRUCTOR: TermName
+ }
+
+ val tpnme: AbsTypeNames
+
+ abstract class AbsTypeNames {
+ }
+}
diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala
index 17d9b06324..44dc2ce1c2 100755
--- a/src/library/scala/reflect/api/Symbols.scala
+++ b/src/library/scala/reflect/api/Symbols.scala
@@ -9,11 +9,20 @@ trait Symbols { self: Universe =>
/** The modifiers of this symbol
*/
- def allModifiers: Set[Modifier.Value]
+ def modifiers: Set[Modifier]
/** Does this symbol have given modifier?
*/
- def hasModifier(mod: Modifier.Value): Boolean
+ def hasModifier(mod: Modifier): Boolean
+
+ /** A list of annotations attached to this Symbol.
+ */
+ def annotations: List[self.AnnotationInfo]
+
+ /** Whether this symbol carries an annotation for which the given
+ * symbol is its typeSymbol.
+ */
+ def hasAnnotation(sym: Symbol): Boolean
/** The owner of this symbol. This is the symbol
* that directly contains the current symbol's definition.
@@ -30,14 +39,6 @@ trait Symbols { self: Universe =>
*/
def name: Name
- /** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`.
- */
- def encodedName: String
-
- /** The decoded name of the symbol, e.g. `==` instead of `\$eq\$eq`.
- */
- def decodedName: String
-
/** The encoded full path name of this symbol, where outer names and inner names
* are separated by periods.
*/
@@ -66,49 +67,43 @@ trait Symbols { self: Universe =>
*
* The java access levels translate as follows:
*
- * java private: hasFlag(PRIVATE) && !hasAccessBoundary
- * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosing package)
- * java protected: hasFlag(PROTECTED) && (privateWithin == enclosing package)
- * java public: !hasFlag(PRIVATE | PROTECTED) && !hasAccessBoundary
+ * java private: hasFlag(PRIVATE) && (privateWithin == NoSymbol)
+ * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosingPackage)
+ * java protected: hasFlag(PROTECTED) && (privateWithin == enclosingPackage)
+ * java public: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == NoSymbol)
*/
def privateWithin: Symbol
- /** Whether this symbol has a "privateWithin" visibility barrier attached.
- */
- def hasAccessBoundary: Boolean
-
- /** A list of annotations attached to this Symbol.
- */
- def getAnnotations: List[self.AnnotationInfo]
-
/** For a class: the module or case class factory with the same name in the same package.
+ * For a module: the class with the same name in the same package.
* For all others: NoSymbol
*/
- def companionModule: Symbol
-
- /** For a module: the class with the same name in the same package.
- * For all others: NoSymbol
- */
- def companionClass: Symbol
-
- /** The module corresponding to this module class (note that this
- * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass
- */
- def sourceModule: Symbol
+ def companionSymbol: Symbol
/** If symbol is an object definition, its implied associated class,
* otherwise NoSymbol
*/
def moduleClass: Symbol // needed for LiftCode
- /** The top-level class containing this symbol. */
- def toplevelClass: Symbol
+ /** If this symbol is a top-level class, this symbol; otherwise the next enclosing
+ * top-level class, or `NoSymbol` if none exists.
+ */
+ def enclosingTopLevelClass: Symbol
- /** The next enclosing class, or `NoSymbol` if none exists */
- def enclClass : Symbol
+ /** If this symbol is a class, this symbol; otherwise the next enclosing
+ * class, or `NoSymbol` if none exists.
+ */
+ def enclosingClass: Symbol
- /** The next enclosing method, or `NoSymbol` if none exists */
- def enclMethod : Symbol
+ /** If this symbol is a method, this symbol; otherwise the next enclosing
+ * method, or `NoSymbol` if none exists.
+ */
+ def enclosingMethod: Symbol
+
+ /** If this symbol is a package class, this symbol; otherwise the next enclosing
+ * package class, or `NoSymbol` if none exists.
+ */
+ def enclosingPackageClass: Symbol
/** Does this symbol represent the definition of term?
* Note that every symbol is either a term or a type.
@@ -141,13 +136,13 @@ trait Symbols { self: Universe =>
/** The type signature of this symbol.
* Note if the symbol is a member of a class, one almost always is interested
- * in `typeSigIn` with a site type instead.
+ * in `typeSignatureIn` with a site type instead.
*/
- def typeSig: Type
+ def typeSignature: Type // !!! Since one should almost never use this, let's give it a different name.
/** The type signature of this symbol seen as a member of given type `site`.
*/
- def typeSigIn(site: Type): Type
+ def typeSignatureIn(site: Type): Type
/** A type reference that refers to this type symbol
* Note if symbol is a member of a class, one almost always is interested
@@ -156,11 +151,11 @@ trait Symbols { self: Universe =>
* Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
* `C`. Then `C.asType` is the type `C[T]`.
*
- * By contrast, `C.typeSig` would be a type signature of form
+ * By contrast, `C.typeSignature` would be a type signature of form
* `PolyType(ClassInfoType(...))` that describes type parameters, value
* parameters, parent types, and members of `C`.
*/
- def asType: Type
+ def asType: Type // !!! Same as typeSignature.
/** A type reference that refers to this type symbol seen
* as a member of given type `site`.
@@ -172,37 +167,37 @@ trait Symbols { self: Universe =>
* are part of results of `asType`, but not of `asTypeConstructor`.
*
* Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
- * `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeCponstructor` is `C`.
+ * `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeConstructor` is `C`.
*/
def asTypeConstructor: Type // needed by LiftCode
- /** If this symbol is a class or trait, its self type, otherwise the type
- * of the symbol itself.
+ /** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`.
*/
- def typeOfThis: Type
+ def thisPrefix: Type
- /** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`.
+ /** If this symbol is a class or trait, its self type, otherwise the type
+ * of the symbol itself.
*/
- def thisType: Type
+ def selfType: Type
/** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
- * the current symbol as its owner.
+ * the current symbol as its owner.
*/
def newNestedSymbol(name: Name, pos: Position, flags: Long): Symbol // needed by LiftCode
/** Low-level operation to set the symbol's flags
* @return the symbol itself
*/
- def setInternalFlags(flags: Long): this.type // needed by LiftCode
+ def setInternalFlags(flags: Long): this.type // needed by LiftCode !!! not enough reason to have in the api
/** Set symbol's type signature to given type
* @return the symbol itself
*/
- def setTypeSig(tpe: Type): this.type // needed by LiftCode
+ def setTypeSignature(tpe: Type): this.type // needed by LiftCode !!! not enough reason to have in the api
/** Set symbol's annotations to given annotations `annots`.
*/
- def setAnnotations(annots: AnnotationInfo*): this.type // needed by LiftCode
+ def setAnnotations(annots: AnnotationInfo*): this.type // needed by LiftCode !!! not enough reason to have in the api
}
val NoSymbol: Symbol
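The distinction drawn above between `typeSignature` and `asType` matters in practice: for the symbol of `class C[T]`, the latter is the type `C[T]` while the former is the full `PolyType(ClassInfoType(...))` signature. A sketch, with `cSym` a class symbol obtained from some mirror:

{{{
cSym.asType                         // the type C[T]
cSym.typeSignature                  // PolyType(ClassInfoType(...)): params, parents, members
cSym.companionSymbol                // the companion module, or NoSymbol
cSym.enclosingTopLevelClass == cSym // true when C is itself top-level
}}}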
diff --git a/src/library/scala/reflect/api/TreeBuildUtil.scala b/src/library/scala/reflect/api/TreeBuildUtil.scala
index b437824925..f28008bc21 100644
--- a/src/library/scala/reflect/api/TreeBuildUtil.scala
+++ b/src/library/scala/reflect/api/TreeBuildUtil.scala
@@ -3,19 +3,19 @@ package scala.reflect.api
trait TreeBuildUtil extends Universe {
/** The symbol corresponding to the globally accessible class with the
- * given fully qualified name `fullname`.
+ * given fully qualified name `fullName`.
*/
- def staticClass(fullname: String): Symbol
+ def staticClass(fullName: String): Symbol
/** The symbol corresponding to the globally accessible object with the
- * given fully qualified name `fullname`.
+ * given fully qualified name `fullName`.
*/
- def staticModule(fullname: String): Symbol
+ def staticModule(fullName: String): Symbol
/** The this-type of the globally accessible object with the
- * given fully qualified name `fullname`.
+ * given fully qualified name `fullName`.
*/
- def thisModuleType(fullname: String): Type
+ def thisModuleType(fullName: String): Type
/** Selects type symbol with given simple name `name` from the defined members of `owner`.
*/
@@ -38,7 +38,7 @@ trait TreeBuildUtil extends Universe {
* @param tsig the type signature of the free variable
* @param value the value of the free variable at runtime
*/
- def freeVar(name: String, tsig: Type, value: Any): Symbol
+ def newFreeVar(name: String, info: Type, value: Any): Symbol
/** Create a Modifiers structure given internal flags, qualifier, annotations */
def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers
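A sketch of the lookup entry points above, to be read inside some concrete TreeBuildUtil universe:

{{{
val listClass  = staticClass("scala.collection.immutable.List")  // the class symbol
val listModule = staticModule("scala.collection.immutable.List") // the object symbol
}}}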
diff --git a/src/library/scala/reflect/api/TreePrinters.scala b/src/library/scala/reflect/api/TreePrinters.scala
index 88ef450ed9..21b55e9c0e 100644
--- a/src/library/scala/reflect/api/TreePrinters.scala
+++ b/src/library/scala/reflect/api/TreePrinters.scala
@@ -31,18 +31,6 @@ trait TreePrinters { self: Universe =>
// emits more or less verbatim representation of the provided tree
// todo. when LiftCode becomes a macro, throw this code away and use that macro
class RawTreePrinter(out: PrintWriter) extends TreePrinter {
- import scala.reflect.api.Modifier
- import scala.reflect.api.Modifier._
-
- def copypasteModifier(mod: Modifier.Value): String = mod match {
- case mod @ (
- `protected` | `private` | `override` |
- `abstract` | `final` | `sealed` |
- `implicit` | `lazy` | `macro` |
- `case` | `trait`) => "`" + mod.toString + "`"
- case mod => mod.toString
- }
-
def print(args: Any*): Unit = args foreach {
case EmptyTree =>
print("EmptyTree")
@@ -67,7 +55,7 @@ trait TreePrinters { self: Universe =>
print(")")
if (typesPrinted)
print(".setType(", tree.tpe, ")")
- case list: List[_] =>
+ case list: List[_] =>
print("List(")
val it = list.iterator
while (it.hasNext) {
@@ -76,16 +64,16 @@ trait TreePrinters { self: Universe =>
}
print(")")
case mods: Modifiers =>
- val parts = collection.mutable.ListBuffer[String]()
- parts += "Set(" + mods.allModifiers.map{copypasteModifier}.mkString(", ") + ")"
+ val parts = collection.mutable.ListBuffer[String]()
+ parts += "Set(" + mods.modifiers.map(_.sourceString).mkString(", ") + ")"
parts += "newTypeName(\"" + mods.privateWithin.toString + "\")"
parts += "List(" + mods.annotations.map{showRaw}.mkString(", ") + ")"
-
+
var keep = 3
if (keep == 3 && mods.annotations.isEmpty) keep -= 1
if (keep == 2 && mods.privateWithin == EmptyTypeName) keep -= 1
- if (keep == 1 && mods.allModifiers.isEmpty) keep -= 1
-
+ if (keep == 1 && mods.modifiers.isEmpty) keep -= 1
+
print("Modifiers(", parts.take(keep).mkString(", "), ")")
case name: Name =>
if (name.isTermName) print("newTermName(\"") else print("newTypeName(\"")
diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala
index 0a38fb45bf..181ce85dac 100644
--- a/src/library/scala/reflect/api/Trees.scala
+++ b/src/library/scala/reflect/api/Trees.scala
@@ -13,17 +13,17 @@ trait Trees { self: Universe =>
private[scala] var nodeCount = 0
- type Modifiers <: AbsModifiers
+ type Modifiers >: Null <: AbsModifiers
abstract class AbsModifiers {
- def hasModifier(mod: Modifier.Value): Boolean
- def allModifiers: Set[Modifier.Value]
+ def modifiers: Set[Modifier]
+ def hasModifier(mod: Modifier): Boolean
def privateWithin: Name // default: EmptyTypeName
def annotations: List[Tree] // default: List()
def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers
}
- def Modifiers(mods: Set[Modifier.Value] = Set(),
+ def Modifiers(mods: Set[Modifier] = Set(),
privateWithin: Name = EmptyTypeName,
annotations: List[Tree] = List()): Modifiers
@@ -439,6 +439,12 @@ trait Trees { self: Universe =>
case class Assign(lhs: Tree, rhs: Tree)
extends TermTree
+ /** Either an assignment or a named argument. Only appears in argument lists,
+ * eliminated by typecheck (doTypedApply)
+ */
+ case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
+ extends TermTree
+
/** Conditional expression */
case class If(cond: Tree, thenp: Tree, elsep: Tree)
extends TermTree
@@ -476,6 +482,20 @@ trait Trees { self: Universe =>
*/
case class New(tpt: Tree) extends TermTree
+ /** Factory method for object creation `new tpt(args_1)...(args_n)`
+ * A `New(t, as)` is expanded to: `(new t).<init>(as)`
+ */
+ def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
+ // todo. we need to expose names in scala.reflect.api
+ val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
+ if (argss.isEmpty) Apply(superRef, Nil)
+ else (superRef /: argss) (Apply)
+ }
+ /** 0-1 argument list new, based on a type.
+ */
+ def New(tpe: Type, args: Tree*): Tree =
+ New(TypeTree(tpe), List(args.toList))
+
/** Type annotation, eliminated by explicit outer */
case class Typed(expr: Tree, tpt: Tree)
extends TermTree
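The fold in `New(tpt, argss)` threads each argument list through its own `Apply`, so multi-list constructor calls nest as the comment describes. Concretely, with `t`, `a` and `b` standing for arbitrary trees and `tp` for a type (all placeholders):

{{{
New(t, List(List(a), List(b)))
// ==> Apply(Apply(Select(New(t), nme.CONSTRUCTOR), List(a)), List(b)),
//     i.e. (new t).<init>(a)(b)

New(tp, a, b)
// ==> New(TypeTree(tp), List(List(a, b))): a single argument list
}}}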
@@ -632,10 +652,10 @@ trait Trees { self: Universe =>
}
def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
-
+
/** An empty deferred value definition corresponding to:
* val _: _
- * This is used as a placeholder in the `self` parameter Template if there is
+ * This is used as a placeholder in the `self` parameter Template if there is
* no definition of a self value of self type.
*/
def emptyValDef: ValDef
@@ -644,6 +664,96 @@ trait Trees { self: Universe =>
val treeCopy = newLazyTreeCopier
+ def copyDefDef(tree: Tree)(
+ mods: Modifiers = null,
+ name: Name = null,
+ tparams: List[TypeDef] = null,
+ vparamss: List[List[ValDef]] = null,
+ tpt: Tree = null,
+ rhs: Tree = null
+ ): DefDef = tree match {
+ case DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0) =>
+ treeCopy.DefDef(tree,
+ if (mods eq null) mods0 else mods,
+ if (name eq null) name0 else name,
+ if (tparams eq null) tparams0 else tparams,
+ if (vparamss eq null) vparamss0 else vparamss,
+ if (tpt eq null) tpt0 else tpt,
+ if (rhs eq null) rhs0 else rhs
+ )
+ case t =>
+ sys.error("Not a DefDef: " + t + "/" + t.getClass)
+ }
+ def copyValDef(tree: Tree)(
+ mods: Modifiers = null,
+ name: Name = null,
+ tpt: Tree = null,
+ rhs: Tree = null
+ ): ValDef = tree match {
+ case ValDef(mods0, name0, tpt0, rhs0) =>
+ treeCopy.ValDef(tree,
+ if (mods eq null) mods0 else mods,
+ if (name eq null) name0 else name,
+ if (tpt eq null) tpt0 else tpt,
+ if (rhs eq null) rhs0 else rhs
+ )
+ case t =>
+ sys.error("Not a ValDef: " + t + "/" + t.getClass)
+ }
+ def copyClassDef(tree: Tree)(
+ mods: Modifiers = null,
+ name: Name = null,
+ tparams: List[TypeDef] = null,
+ impl: Template = null
+ ): ClassDef = tree match {
+ case ClassDef(mods0, name0, tparams0, impl0) =>
+ treeCopy.ClassDef(tree,
+ if (mods eq null) mods0 else mods,
+ if (name eq null) name0 else name,
+ if (tparams eq null) tparams0 else tparams,
+ if (impl eq null) impl0 else impl
+ )
+ case t =>
+ sys.error("Not a ClassDef: " + t + "/" + t.getClass)
+ }
+
+ def deriveDefDef(ddef: Tree)(applyToRhs: Tree => Tree): DefDef = ddef match {
+ case DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0) =>
+ treeCopy.DefDef(ddef, mods0, name0, tparams0, vparamss0, tpt0, applyToRhs(rhs0))
+ case t =>
+ sys.error("Not a DefDef: " + t + "/" + t.getClass)
+ }
+ def deriveValDef(vdef: Tree)(applyToRhs: Tree => Tree): ValDef = vdef match {
+ case ValDef(mods0, name0, tpt0, rhs0) =>
+ treeCopy.ValDef(vdef, mods0, name0, tpt0, applyToRhs(rhs0))
+ case t =>
+ sys.error("Not a ValDef: " + t + "/" + t.getClass)
+ }
+ def deriveTemplate(templ: Tree)(applyToBody: List[Tree] => List[Tree]): Template = templ match {
+ case Template(parents0, self0, body0) =>
+ treeCopy.Template(templ, parents0, self0, applyToBody(body0))
+ case t =>
+ sys.error("Not a Template: " + t + "/" + t.getClass)
+ }
+ def deriveClassDef(cdef: Tree)(applyToImpl: Template => Template): ClassDef = cdef match {
+ case ClassDef(mods0, name0, tparams0, impl0) =>
+ treeCopy.ClassDef(cdef, mods0, name0, tparams0, applyToImpl(impl0))
+ case t =>
+ sys.error("Not a ClassDef: " + t + "/" + t.getClass)
+ }
+ def deriveCaseDef(cdef: Tree)(applyToBody: Tree => Tree): CaseDef = cdef match {
+ case CaseDef(pat0, guard0, body0) =>
+ treeCopy.CaseDef(cdef, pat0, guard0, applyToBody(body0))
+ case t =>
+ sys.error("Not a CaseDef: " + t + "/" + t.getClass)
+ }
+ def deriveLabelDef(ldef: Tree)(applyToRhs: Tree => Tree): LabelDef = ldef match {
+ case LabelDef(name0, params0, rhs0) =>
+ treeCopy.LabelDef(ldef, name0, params0, applyToRhs(rhs0))
+ case t =>
+ sys.error("Not a LabelDef: " + t + "/" + t.getClass)
+ }
+
class Traverser {
protected var currentOwner: Symbol = definitions.RootClass
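The `copyXxx` and `deriveXxx` helpers above cut the boilerplate of rebuilding trees: `copyDefDef` replaces any subset of a DefDef's fields (null meaning keep the original), while `deriveDefDef` rewrites only the right-hand side. A sketch, with `ddef` standing for some existing DefDef:

{{{
// Rename the method, keep everything else:
val renamed = copyDefDef(ddef)(name = newTermName("renamed"))

// Wrap the existing body in a Block:
val wrapped = deriveDefDef(ddef)(rhs => Block(Nil, rhs))
}}}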
@@ -705,6 +815,8 @@ trait Trees { self: Universe =>
}
case Assign(lhs, rhs) =>
traverse(lhs); traverse(rhs)
+ case AssignOrNamedArg(lhs, rhs) =>
+ traverse(lhs); traverse(rhs)
case If(cond, thenp, elsep) =>
traverse(cond); traverse(thenp); traverse(elsep)
case Match(selector, cases) =>
@@ -803,6 +915,7 @@ trait Trees { self: Universe =>
def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]): ArrayValue
def Function(tree: Tree, vparams: List[ValDef], body: Tree): Function
def Assign(tree: Tree, lhs: Tree, rhs: Tree): Assign
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg
def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree): If
def Match(tree: Tree, selector: Tree, cases: List[CaseDef]): Match
def Return(tree: Tree, expr: Tree): Return
@@ -865,6 +978,8 @@ trait Trees { self: Universe =>
new Function(vparams, body).copyAttrs(tree)
def Assign(tree: Tree, lhs: Tree, rhs: Tree) =
new Assign(lhs, rhs).copyAttrs(tree)
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) =
+ new AssignOrNamedArg(lhs, rhs).copyAttrs(tree)
def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) =
new If(cond, thenp, elsep).copyAttrs(tree)
def Match(tree: Tree, selector: Tree, cases: List[CaseDef]) =
@@ -1010,6 +1125,11 @@ trait Trees { self: Universe =>
if (lhs0 == lhs) && (rhs0 == rhs) => t
case _ => treeCopy.Assign(tree, lhs, rhs)
}
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) = tree match {
+ case t @ AssignOrNamedArg(lhs0, rhs0)
+ if (lhs0 == lhs) && (rhs0 == rhs) => t
+ case _ => treeCopy.AssignOrNamedArg(tree, lhs, rhs)
+ }
def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) = tree match {
case t @ If(cond0, thenp0, elsep0)
if (cond0 == cond) && (thenp0 == thenp) && (elsep0 == elsep) => t
@@ -1129,9 +1249,9 @@ trait Trees { self: Universe =>
abstract class Transformer {
val treeCopy: TreeCopier = newLazyTreeCopier
protected var currentOwner: Symbol = definitions.RootClass
- protected def currentMethod = currentOwner.enclMethod
- protected def currentClass = currentOwner.enclClass
- protected def currentPackage = currentOwner.toplevelClass.owner
+ protected def currentMethod = currentOwner.enclosingMethod
+ protected def currentClass = currentOwner.enclosingClass
+ protected def currentPackage = currentOwner.enclosingTopLevelClass.owner
def transform(tree: Tree): Tree = tree match {
case EmptyTree =>
tree
@@ -1194,6 +1314,8 @@ trait Trees { self: Universe =>
}
case Assign(lhs, rhs) =>
treeCopy.Assign(tree, transform(lhs), transform(rhs))
+ case AssignOrNamedArg(lhs, rhs) =>
+ treeCopy.AssignOrNamedArg(tree, transform(lhs), transform(rhs))
case If(cond, thenp, elsep) =>
treeCopy.If(tree, transform(cond), transform(thenp), transform(elsep))
case Match(selector, cases) =>
@@ -1361,6 +1483,8 @@ trait Trees { self: Universe =>
// vparams => body where vparams:List[ValDef]
case Assign(lhs, rhs) =>
// lhs = rhs
+ case AssignOrNamedArg(lhs, rhs) => (eliminated by typer, resurrected by reifier)
+ // @annotation(lhs = rhs)
case If(cond, thenp, elsep) =>
// if (cond) thenp else elsep
case Match(selector, cases) =>
diff --git a/src/library/scala/reflect/api/Types.scala b/src/library/scala/reflect/api/Types.scala
index 6185a788ae..cc8e85b9c8 100755
--- a/src/library/scala/reflect/api/Types.scala
+++ b/src/library/scala/reflect/api/Types.scala
@@ -6,7 +6,6 @@ trait Types { self: Universe =>
/** This class declares operations that are visible in a Type.
*/
abstract class AbsType {
-
/** The type symbol associated with the type, or `NoSymbol` for types
* that do not refer to a type symbol.
*/
@@ -47,7 +46,7 @@ trait Types { self: Universe =>
/** Substitute types in `to` for corresponding occurrences of references to
* symbols `from` in this type.
*/
- def subst(from: List[Symbol], to: List[Type]): Type
+ def substituteTypes(from: List[Symbol], to: List[Type]): Type // !!! Too many things with names like "subst"
/** If this is a parameterized types, the type arguments.
* Otherwise the empty list
@@ -56,7 +55,7 @@ trait Types { self: Universe =>
/** Is this type a type constructor that is missing its type arguments?
*/
- def isHigherKinded: Boolean
+ def isHigherKinded: Boolean // !!! This should be called "isTypeConstructor", no?
/**
* Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
@@ -66,7 +65,7 @@ trait Types { self: Universe =>
* TypeRef(pre, <List>, List()) is replaced by
* PolyType(X, TypeRef(pre, <List>, List(X)))
*/
- def normalize: Type
+ def normalize: Type // !!! Alternative name? "normalize" is used to mean too many things.
/** Does this type conform to given type argument `that`? */
def <:< (that: Type): Boolean
@@ -74,11 +73,11 @@ trait Types { self: Universe =>
/** Is this type equivalent to given type argument `that`? */
def =:= (that: Type): Boolean
- /** The list of all baseclasses of this type (including its own typeSymbol)
+ /** The list of all base classes of this type (including its own typeSymbol)
* in reverse linearization order, starting with the class itself and ending
* in class Any.
*/
- def baseClasses: List[Symbol]
+ def baseClasses: List[Symbol] // !!! Alternative name, perhaps linearization?
/** The least type instance of given class which is a supertype
* of this type. Example:
@@ -104,9 +103,9 @@ trait Types { self: Universe =>
def asSeenFrom(pre: Type, clazz: Symbol): Type
/** The erased type corresponding to this type after
- * all transcformations from Scala to Java have been performed.
+ * all transformations from Scala to Java have been performed.
*/
- def erasedType: Type
+ def erasedType: Type // !!! "erasedType", compare with "widen" (so "erase") or "underlying" (so "erased")
/** Apply `f` to each part of this type, returning
* a new type. children get mapped before their parents */
@@ -138,10 +137,10 @@ trait Types { self: Universe =>
/** If this is a singleton type, widen it to its nearest underlying non-singleton
* base type by applying one or more `underlying` dereferences.
- * If this is not a singlecon type, returns this type itself.
+ * If this is not a singleton type, returns this type itself.
*
* Example:
- *
+ *
* class Outer { class C ; val x: C }
* val o: Outer
* <o.x.type>.widen = o.C
@@ -400,11 +399,6 @@ trait Types { self: Universe =>
def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
}
-
-
-
-
-
abstract class NullaryMethodTypeExtractor {
def apply(resultType: Type): NullaryMethodType
def unapply(tpe: NullaryMethodType): Option[(Type)]
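A sketch of the renamed `substituteTypes`, with every name a placeholder: given `tp` the type `Map[K, V]` and `kSym`, `vSym` the symbols of its type parameters, substitution yields `Map[String, Int]`:

{{{
val result = tp.substituteTypes(List(kSym, vSym), List(stringType, intType))
}}}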
diff --git a/src/library/scala/reflect/api/Universe.scala b/src/library/scala/reflect/api/Universe.scala
index 03acbdda2c..a3cec3271b 100755
--- a/src/library/scala/reflect/api/Universe.scala
+++ b/src/library/scala/reflect/api/Universe.scala
@@ -10,7 +10,8 @@ abstract class Universe extends Symbols
with Positions
with TreePrinters
with AnnotationInfos
- with StandardDefinitions {
+ with StandardDefinitions
+ with StandardNames {
type Position
val NoPosition: Position
diff --git a/src/library/scala/reflect/macro/Context.scala b/src/library/scala/reflect/macro/Context.scala
index d0a2787fdf..2fd9bb6484 100644
--- a/src/library/scala/reflect/macro/Context.scala
+++ b/src/library/scala/reflect/macro/Context.scala
@@ -2,14 +2,35 @@ package scala.reflect
package macro
trait Context extends api.Universe {
-
+
/** Mark a variable as captured; i.e. force boxing in a *Ref type.
*/
def captureVariable(vble: Symbol): Unit
-
+
/** Mark given identifier as a reference to a captured variable itself
* suppressing dereferencing with the `elem` field.
*/
def referenceCapturedVariable(id: Ident): Tree
+ /** Given a tree or type, generate a tree that when executed at runtime produces the original tree or type.
+ * For instance, given the abstract syntax tree representation of the `x + 1` expression:
+ *
+ * Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
+ *
+ * The reifier transforms it to the following tree:
+ *
+ * $mr.Apply($mr.Select($mr.Ident($mr.newFreeVar("x", <Int>, x)), "+"), List($mr.Literal($mr.Constant(1))))
+ *
+ * The transformation looks mostly straightforward, but it has its tricky parts:
+ * * Reifier retains symbols and types defined outside the reified tree, however
+ * locally defined entities get erased and replaced with their original trees
+ * * Free variables are detected and wrapped in symbols of the type FreeVar
+ * * Mutable variables that are accessed from a local function are wrapped in refs
+ * * Since reified trees can be compiled outside of the scope they've been created in,
+ * special measures are taken to ensure that all freeVars remain visible
+ *
+ * Typical usage of this function is to retain some of the trees received or created by a macro
+ * in a form that can be inspected (via pattern matching) or compiled and run (by a reflective ToolBox) at runtime.
+ */
+ def reify(tree: Tree): Tree
}
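A rough sketch of `reify` from inside a macro implementation (the names are illustrative; the macro contract itself is outside this diff):

{{{
import scala.reflect.macro.Context

// Retain the received tree in a runnable/inspectable form:
def impl(c: Context)(tree: c.Tree): c.Tree = c.reify(tree)
}}}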
diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala
index a9e5e90e20..b2f336fe52 100644
--- a/src/library/scala/runtime/AbstractFunction1.scala
+++ b/src/library/scala/runtime/AbstractFunction1.scala
@@ -9,6 +9,6 @@
package scala.runtime
-abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] {
+abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] {
}
diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala
index 8be2745086..216e3e664b 100644
--- a/src/library/scala/runtime/NonLocalReturnControl.scala
+++ b/src/library/scala/runtime/NonLocalReturnControl.scala
@@ -6,12 +6,10 @@
** |/ **
\* */
-
-
package scala.runtime
import scala.util.control.ControlThrowable
-class NonLocalReturnControl[T](val key: AnyRef, val value: T) extends ControlThrowable {
+class NonLocalReturnControl[@specialized T](val key: AnyRef, val value: T) extends ControlThrowable {
final override def fillInStackTrace(): Throwable = this
}
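For context on why specializing `T` here pays off: a non-local return from a closure is compiled to throwing NonLocalReturnControl carrying the return value, so without specialization every primitive return value would be boxed. A sketch of code that exercises this path:

{{{
// The `return x` inside the closure is compiled to throwing
// NonLocalReturnControl[Int]; @specialized T keeps x unboxed:
def firstPositive(xs: List[Int]): Int = {
  xs foreach { x => if (x > 0) return x }
  -1
}
}}}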
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 951bdd888e..8bc63ae3a0 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -36,7 +36,16 @@ object ScalaRunTime {
case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
case _ => false
}
- private val tupleNames = 1 to 22 map ("scala.Tuple" + _) toSet
+ // Avoiding boxing which messes up the specialized tests. Don't ask.
+ private val tupleNames = {
+ var i = 22
+ var names: List[String] = Nil
+ while (i >= 1) {
+ names ::= ("scala.Tuple" + String.valueOf(i))
+ i -= 1
+ }
+ names.toSet
+ }
/** Return the class object representing an unboxed value type,
* e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala
index 902faa166e..b876869afb 100644
--- a/src/library/scala/specialized.scala
+++ b/src/library/scala/specialized.scala
@@ -6,10 +6,10 @@
** |/ **
\* */
-
-
package scala
+import Specializable._
+
/** Annotate type parameters on which code should be automatically
* specialized. For example:
* {{{
@@ -24,8 +24,9 @@ package scala
*
* @since 2.8
*/
-class specialized(types: SpecializableCompanion*) extends annotation.StaticAnnotation {
- def this() {
- this(Unit, Boolean, Byte, Short, Char, Int, Long, Float, Double)
- }
+// class tspecialized[T](group: Group[T]) extends annotation.StaticAnnotation {
+
+class specialized(group: SpecializedGroup) extends annotation.StaticAnnotation {
+ def this(types: Specializable*) = this(new Group(types.toList))
+ def this() = this(Everything)
}
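Under the new scheme a bare `@specialized` means the `Everything` group, while an explicit list goes through the `Specializable*` constructor; a hedged usage sketch:

{{{
// Variants generated only for Int and Double:
class Box[@specialized(Int, Double) T](val value: T)

// Bare form: specialize on the Everything group:
def identityAll[@specialized T](t: T): T = t
}}}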
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 44e573896f..77e36f6196 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -13,15 +13,25 @@ import processInternal._
import java.io.{ BufferedReader, InputStreamReader, FilterInputStream, FilterOutputStream }
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.immutable.Stream
+import scala.annotation.tailrec
/**
* This object contains factories for [[scala.sys.process.ProcessIO]],
* which can be used to control the I/O of a [[scala.sys.process.Process]]
* when a [[scala.sys.process.ProcessBuilder]] is started with the `run`
* command.
+ *
+ * It also contains some helper methods that can be used in the creation of
+ * `ProcessIO`.
+ *
+ * It is used by other classes in the package in the implementation of various
+ * features, but can also be used by client code.
*/
object BasicIO {
+ /** Size of the buffer used in all the functions that copy data */
final val BufferSize = 8192
+
+ /** Used to separate lines in the `processFully` function that takes `Appendable`. */
final val Newline = props("line.separator")
private[process] final class Streamed[T](
@@ -52,15 +62,70 @@ object BasicIO {
def protect(out: OutputStream): OutputStream = if ((out eq stdout) || (out eq stderr)) Uncloseable(out) else out
}
+ /** Creates a `ProcessIO` from a function `String => Unit`. It can attach the
+ * process input to stdin, and it will either send the error stream to
+ * stderr, or to a `ProcessLogger`.
+ *
+ * For example, the `ProcessIO` created below will print all normal output
+ * while ignoring all error output. No input will be provided.
+ * {{{
+ * import scala.sys.process.BasicIO
+ * val errToDevNull = BasicIO(false, println(_), None)
+ * }}}
+ *
+ * @param withIn True if the process input should be attached to stdin.
+ * @param output A function that will be called with the process output.
+ * @param log An optional `ProcessLogger` to which the output should be
+ * sent. If `None`, output will be sent to stderr.
+ * @return A `ProcessIO` with the characteristics above.
+ */
def apply(withIn: Boolean, output: String => Unit, log: Option[ProcessLogger]) =
new ProcessIO(input(withIn), processFully(output), getErr(log))
+ /** Creates a `ProcessIO` that appends its output to a `StringBuffer`. It can
+ * attach the process input to stdin, and it will either send the error
+ * stream to stderr, or to a `ProcessLogger`.
+ *
+ * For example, the `ProcessIO` created by the function below will store the
+ * normal output on the buffer provided, and print all error on stderr. The
+ * input will be read from stdin.
+ * {{{
+ * import scala.sys.process.{BasicIO, ProcessLogger}
+ * val printer = ProcessLogger(println(_))
+ * def appendToBuffer(b: StringBuffer) = BasicIO(true, b, Some(printer))
+ * }}}
+ *
+ * @param withIn True if the process input should be attached to stdin.
+ * @param buffer A `StringBuffer` which will receive the process normal
+ * output.
+ * @param log An optional `ProcessLogger` to which the output should be
+ * sent. If `None`, output will be sent to stderr.
+ * @return A `ProcessIO` with the characteristics above.
+ */
def apply(withIn: Boolean, buffer: StringBuffer, log: Option[ProcessLogger]) =
new ProcessIO(input(withIn), processFully(buffer), getErr(log))
+ /** Creates a `ProcessIO` from a `ProcessLogger`. It can attach the
+ * process input to stdin.
+ *
+ * @param withIn True if the process input should be attached to stdin.
+ * @param log A `ProcessLogger` to receive all output, normal and error.
+ * @return A `ProcessIO` with the characteristics above.
+ */
def apply(withIn: Boolean, log: ProcessLogger) =
new ProcessIO(input(withIn), processOutFully(log), processErrFully(log))
+ /** Returns a function `InputStream => Unit` given an optional
+ * `ProcessLogger`. If no logger is passed, the function will send the output
+ * to stderr. This function can be used to create a
+ * [[scala.sys.process.ProcessIO]].
+ *
+ * @param log An optional `ProcessLogger` to which the contents of
+ * the `InputStream` will be sent.
+ * @return A function `InputStream => Unit` (used by
+ * [[scala.sys.process.ProcessIO]]) which will send the data to
+ * either the provided `ProcessLogger` or, if `None`, to stderr.
+ */
def getErr(log: Option[ProcessLogger]) = log match {
case Some(lg) => processErrFully(lg)
case None => toStdErr
@@ -69,13 +134,40 @@ object BasicIO {
private def processErrFully(log: ProcessLogger) = processFully(log err _)
private def processOutFully(log: ProcessLogger) = processFully(log out _)
+ /** Closes a `Closeable` without throwing an exception */
def close(c: Closeable) = try c.close() catch { case _: IOException => () }
+
+ /** Returns a function `InputStream => Unit` that appends all data read to the
+ * provided `Appendable`. This function can be used to create a
+ * [[scala.sys.process.ProcessIO]]. Data is appended to the buffer line by line.
+ *
+ * @param buffer An `Appendable` such as `StringBuilder` or `StringBuffer`.
+ * @return A function `InputStream => Unit` (used by
+ * [[scala.sys.process.ProcessIO]]) which will append all data read
+ * from the stream to the buffer.
+ */
def processFully(buffer: Appendable): InputStream => Unit = processFully(appendLine(buffer))
+
+ /** Returns a function `InputStream => Unit` that will call the passed
+ * function with all data read. This function can be used to create a
+ * [[scala.sys.process.ProcessIO]]. The `processLine` function will be called
+ * with each line read, and `Newline` will be appended after each line.
+ *
+ * @param processLine A function that will be called with all data read from
+ * the stream.
+ * @return A function `InputStream => Unit` (used by
+ * [[scala.sys.process.ProcessIO]]) which will call `processLine`
+ * with all data read from the stream.
+ */
def processFully(processLine: String => Unit): InputStream => Unit = in => {
val reader = new BufferedReader(new InputStreamReader(in))
processLinesFully(processLine)(reader.readLine)
+ reader.close()
}
+ /** Calls `processLine` with the result of `readLine` until the latter returns
+ * `null`.
+ */
def processLinesFully(processLine: String => Unit)(readLine: () => String) {
def readFully() {
val line = readLine()
@@ -86,14 +178,38 @@ object BasicIO {
}
readFully()
}
- def connectToIn(o: OutputStream): Unit = transferFully(stdin, o)
- def input(connect: Boolean): OutputStream => Unit = if (connect) connectToIn else _ => ()
+
+ /** Copies the contents of stdin to the `OutputStream`. */
+ def connectToIn(o: OutputStream): Unit = transferFully(Uncloseable protect stdin, o)
+
+ /** Returns a function `OutputStream => Unit` that copies stdin to the
+ * stream when `connect` is true, and closes it either way. It can be used by
+ * [[scala.sys.process.ProcessIO]].
+ */
+ def input(connect: Boolean): OutputStream => Unit = { outputToProcess =>
+ if (connect) connectToIn(outputToProcess)
+ outputToProcess.close()
+ }
+
+ /** Returns a `ProcessIO` connected to stdout and stderr, and, optionally, stdin. */
def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput))
+
+ /** Returns a `ProcessIO` connected to stdout, stderr and the provided `in`. */
def standard(in: OutputStream => Unit): ProcessIO = new ProcessIO(in, toStdOut, toStdErr)
+ /** Sends all the input from the stream to stderr, and closes the input stream
+ * afterwards.
+ */
def toStdErr = (in: InputStream) => transferFully(in, stderr)
+
+ /** Sends all the input from the stream to stdout, and closes the input stream
+ * afterwards.
+ */
def toStdOut = (in: InputStream) => transferFully(in, stdout)
+ /** Copies all input from the input stream to the output stream. Closes the
+ * input stream once it's all read.
+ */
def transferFully(in: InputStream, out: OutputStream): Unit =
try transferFullyImpl(in, out)
catch onInterrupt(())
@@ -105,14 +221,16 @@ object BasicIO {
private[this] def transferFullyImpl(in: InputStream, out: OutputStream) {
val buffer = new Array[Byte](BufferSize)
- def loop() {
+ @tailrec def loop() {
val byteCount = in.read(buffer)
if (byteCount > 0) {
out.write(buffer, 0, byteCount)
- out.flush()
- loop()
+ // flush() will throw an exception once the process has terminated
+ val available = try { out.flush(); true } catch { case _: IOException => false }
+ if (available) loop()
}
}
loop()
+ in.close()
}
}
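
A usage sketch tying together the `BasicIO` factories documented above; the command and the names `out`, `errLog` and `io` are illustrative only:
{{{
import scala.sys.process._

// Buffer the normal output of a process and route its error lines
// through a ProcessLogger, letting BasicIO assemble the ProcessIO.
val out    = new StringBuffer
val errLog = ProcessLogger(line => Console.err.println(line))
val io     = BasicIO(false, out, Some(errLog))
val exit   = Process("ls").run(io).exitValue()
}}}
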
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index b8765aa615..c2a61af936 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -13,7 +13,7 @@ import processInternal._
import ProcessBuilder._
/** Represents a process that is running or has finished running.
- * It may be a compound process with several underlying native processes (such as 'a #&& b`).
+ * It may be a compound process with several underlying native processes (such as `a #&& b`).
*
* This trait is often not used directly, though its companion object contains
* factories for [[scala.sys.process.ProcessBuilder]], the main component of this
@@ -42,28 +42,28 @@ object Process extends ProcessImpl with ProcessCreation { }
* found on and used through [[scala.sys.process.Process]]'s companion object.
*/
trait ProcessCreation {
- /** Create a [[scala.sys.process.ProcessBuilder]] from a `String`, including the
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String`, including the
* parameters.
*
* @example {{{ apply("cat file.txt") }}}
*/
def apply(command: String): ProcessBuilder = apply(command, None)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a sequence of `String`,
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a sequence of `String`,
* where the head is the command and each element of the tail is a parameter.
*
* @example {{{ apply("cat" :: files) }}}
*/
def apply(command: Seq[String]): ProcessBuilder = apply(command, None)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a command represented by a `String`,
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a command represented by a `String`,
* and a sequence of `String` representing the arguments.
*
* @example {{{ apply("cat", files) }}}
*/
def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command +: arguments, None)
- /** Create a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
+ /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
* environment variables.
*
* @example {{{ apply("java", new java.ioFile("/opt/app"), "CLASSPATH" -> "library.jar") }}}
@@ -71,7 +71,7 @@ trait ProcessCreation {
def apply(command: String, cwd: File, extraEnv: (String, String)*): ProcessBuilder =
apply(command, Some(cwd), extraEnv: _*)
- /** Create a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
+ /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
* environment variables.
*
* @example {{{ apply("java" :: javaArgs, new java.ioFile("/opt/app"), "CLASSPATH" -> "library.jar") }}}
@@ -79,7 +79,7 @@ trait ProcessCreation {
def apply(command: Seq[String], cwd: File, extraEnv: (String, String)*): ProcessBuilder =
apply(command, Some(cwd), extraEnv: _*)
- /** Create a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
+ /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
* `File` and extra environment variables.
*
* @example {{{ apply("java", params.get("cwd"), "CLASSPATH" -> "library.jar") }}}
@@ -93,7 +93,7 @@ trait ProcessCreation {
}*/
}
- /** Create a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
+ /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
* `File` and extra environment variables.
*
* @example {{{ apply("java" :: javaArgs, params.get("cwd"), "CLASSPATH" -> "library.jar") }}}
@@ -105,7 +105,7 @@ trait ProcessCreation {
apply(jpb)
}
- /** create a [[scala.sys.process.ProcessBuilder]] from a `java.lang.ProcessBuilder`.
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.lang.ProcessBuilder`.
*
* @example {{{
* apply((new java.lang.ProcessBuilder("ls", "-l")) directory new java.io.File(System.getProperty("user.home")))
@@ -113,19 +113,19 @@ trait ProcessCreation {
*/
def apply(builder: JProcessBuilder): ProcessBuilder = new Simple(builder)
- /** create a [[scala.sys.process.ProcessBuilder]] from a `java.io.File`. This
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.io.File`. This
* `ProcessBuilder` can then be used as a `Source` or a `Sink`, so one can
* pipe things from and to it.
*/
def apply(file: File): FileBuilder = new FileImpl(file)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a `java.net.URL`. This
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.net.URL`. This
* `ProcessBuilder` can then be used as a `Source`, so that one can pipe things
* from it.
*/
def apply(url: URL): URLBuilder = new URLImpl(url)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a Scala XML Element.
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a Scala XML Element.
* This can be used as a way to template strings.
*
* @example {{{
@@ -134,23 +134,23 @@ trait ProcessCreation {
*/
def apply(command: scala.xml.Elem): ProcessBuilder = apply(command.text.trim)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a `Boolean`. This can be
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `Boolean`. This can be used
* to force an exit value.
*/
def apply(value: Boolean): ProcessBuilder = apply(value.toString, if (value) 0 else 1)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a `String` name and a
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String` name and a
* `Boolean`. This can be used to force an exit value, with the name being
* used for `toString`.
*/
def apply(name: String, exitValue: => Int): ProcessBuilder = new Dummy(name, exitValue)
- /** Create a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence of
+ /** Creates a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence of
* something else for which there's an implicit conversion to `Source`.
*/
def applySeq[T](builders: Seq[T])(implicit convert: T => Source): Seq[Source] = builders.map(convert)
- /** Create a [[scala.sys.process.ProcessBuilder]] from one or more
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from one or more
* [[scala.sys.process.ProcessBuilder.Source]], which can then be
* piped to something else.
*
@@ -170,7 +170,7 @@ trait ProcessCreation {
*/
def cat(file: Source, files: Source*): ProcessBuilder = cat(file +: files)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a non-empty sequence
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a non-empty sequence
* of [[scala.sys.process.ProcessBuilder.Source]], which can then be
* piped to something else.
*
@@ -198,18 +198,41 @@ trait ProcessImplicits {
/** Implicitly convert a `java.lang.ProcessBuilder` into a Scala one. */
implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder)
- /** Implicitly convert a `java.io.File` into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a `java.io.File` into a
+ * [[scala.sys.process.ProcessBuilder.FileBuilder]], which can be used as
+ * either input or output of a process. For example:
+ * {{{
+ * import scala.sys.process._
+ * "ls" #> new java.io.File("dirContents.txt") !
+ * }}}
+ */
implicit def fileToProcess(file: File): FileBuilder = apply(file)
- /** Implicitly convert a `java.net.URL` into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a `java.net.URL` into a
+ * [[scala.sys.process.ProcessBuilder.URLBuilder]], which can be used as
+ * input to a process. For example:
+ * {{{
+ * import scala.sys.process._
+ * Seq("xmllint", "--html", "-") #< new java.net.URL("http://www.scala-lang.org") #> new java.io.File("fixed.html") !
+ * }}}
+ */
implicit def urlToProcess(url: URL): URLBuilder = apply(url)
- /** Implicitly convert a [[scala.xml.Elem]] into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a [[scala.xml.Elem]] into a
+ * [[scala.sys.process.ProcessBuilder]]. This is done by obtaining the text
+ * elements of the element, trimming spaces, and then converting the result
+ * from string to a process. Importantly, tags are completely ignored, so
+ * they cannot be used to separate parameters.
+ */
implicit def xmlToProcess(command: scala.xml.Elem): ProcessBuilder = apply(command)
- /** Implicitly convert a `String` into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a `String` into a [[scala.sys.process.ProcessBuilder]]. */
implicit def stringToProcess(command: String): ProcessBuilder = apply(command)
- /** Implicitly convert a sequence of `String` into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a sequence of `String` into a
+ * [[scala.sys.process.ProcessBuilder]]. The first argument will be taken to
+ * be the command to be executed, and the remaining will be its arguments.
+ * When using this, arguments may contain spaces.
+ */
implicit def stringSeqToProcess(command: Seq[String]): ProcessBuilder = apply(command)
}
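
A short sketch of the factories above; the directory and environment variable are illustrative:
{{{
import java.io.File
import scala.sys.process._

// Run "ls -l" in /tmp with an extra environment variable, using the
// (Seq[String], File, (String, String)*) factory.
val builder  = Process(Seq("ls", "-l"), new File("/tmp"), "LC_ALL" -> "C")
val exitCode = builder.!
}}}
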
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index 214d908012..20270d423f 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -12,133 +12,265 @@ package process
import processInternal._
import ProcessBuilder._
-/** Represents a runnable process.
+/** Represents a sequence of one or more external processes that can be
+ * executed. A `ProcessBuilder` can be a single external process, or a
+ * combination of other `ProcessBuilder`. One can control where a
+ * the output of an external process will go to, and where its input will come
+ * from, or leave that decision to whoever starts it.
*
- * This is the main component of this package. A `ProcessBuilder` may be composed with
- * others, either concatenating their outputs or piping them from one to the next, and
- * possibly with conditional execution depending on the last process exit value.
+ * One creates a `ProcessBuilder` through factories provided in
+ * [[scala.sys.process.Process]]'s companion object, or implicit conversions
+ * based on these factories made available in the package object
+ * [[scala.sys.process]]. Here are some examples:
+ * {{{
+ * import scala.sys.process._
*
- * Once executed, one can retrieve the output or redirect it to a
- * [[scala.sys.process.ProcessLogger]], or one can get the exit value, discarding or
- * redirecting the output.
+ * // Executes "ls" and sends output to stdout
+ * "ls".!
*
- * One creates a `ProcessBuilder` through factories provided in [[scala.sys.process.Process]]'s
- * companion object, or implicit conversions based on these factories made available in the
- * package object [[scala.sys.process]].
+ * // Execute "ls" and assign a `Stream[String]` of its output to "contents".
+ * // Because [[scala.Predef]] already defines a `lines` method for `String`,
+ * // we use [[scala.sys.process.Process]]'s object companion to create it.
+ * val contents = Process("ls").lines
*
- * Let's examine in detail one example of usage:
+ * // Here we use a `Seq` to make the parameter whitespace-safe
+ * def contentsOf(dir: String): String = Seq("ls", dir).!!
+ * }}}
+ *
+ * The methods of `ProcessBuilder` are divided into three categories: the ones that
+ * combine two `ProcessBuilder` to create a third, the ones that redirect input
+ * or output of a `ProcessBuilder`, and the ones that execute
+ * the external processes associated with it.
+ *
+ * ==Combining `ProcessBuilder`==
+ *
+ * Two existing `ProcessBuilder` can be combined in the following ways:
+ *
+ * * They can be executed in parallel, with the output of the first being fed
+ * as input to the second, like Unix pipes. This is achieved with the `#|`
+ * method.
+ * * They can be executed in sequence, with the second starting as soon as
+ * the first ends. This is done by the `###` method.
+ * * The execution of the second one can be conditioned by the return code
+ * (exit status) of the first, either only when it's zero, or only when it's
+ * not zero. The methods `#&&` and `#||` accomplish these tasks.
+ *
+ * ==Redirecting Input/Output==
+ *
+ * Though control of input and output can be done when executing the process,
+ * there are a few methods that create a new `ProcessBuilder` with a
+ * pre-configured input or output. They are `#<`, `#>` and `#>>`, and may take
+ * as input either another `ProcessBuilder` (like the pipe described above), or
+ * something else such as a `java.io.File` or a `java.lang.InputStream`.
+ * For example:
+ * {{{
+ * new URL("http://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") !
+ * }}}
+ *
+ * ==Starting Processes==
+ *
+ * To execute all external commands associated with a `ProcessBuilder`, one
+ * may use one of four groups of methods. Each of these methods has various
+ * overloads and variations to enable further control over the I/O. These
+ * methods are:
+ *
+ * * `run`: the most general method, it returns a
+ * [[scala.sys.process.Process]] immediately, and the external command
+ * executes concurrently.
+ * * `!`: blocks until all external commands exit, and returns the exit code
+ * of the last one in the chain of execution.
+ * * `!!`: blocks until all external commands exit, and returns a `String`
+ * with the output generated.
+ * * `lines`: returns immediately like `run`, and the output being generated
+ * is provided through a `Stream[String]`. Getting the next element of that
+ * `Stream` may block until it becomes available. This method will throw an
+ * exception if the return code is different from zero -- if this is not
+ * desired, use the `lines_!` method.
+ *
+ * ==Handling Input and Output==
+ *
+ * If not specified, the input of the external commands executed with `run` or
+ * `!` will not be tied to anything, and the output will be redirected to the
+ * stdout and stderr of the Scala process. For the methods `!!` and `lines`, no
+ * input will be provided, and the output will be directed according to the
+ * semantics of these methods.
*
+ * Some methods will cause stdin to be used as input. Output can be controlled
+ * with a [[scala.sys.process.ProcessLogger]] -- `!!` and `lines` will only
+ * redirect error output when passed a `ProcessLogger`. If one desires full
+ * control over input and output, then a [[scala.sys.process.ProcessIO]] can be
+ * used with `run`.
+ *
+ * For example, we could silence the error output from `lines_!` like this:
+ * {{{
+ * val etcFiles = "find /etc" lines_! ProcessLogger(line => ())
+ * }}}
+ *
+ * ==Extended Example==
+ *
+ * Let's examine in detail one example of usage:
* {{{
* import scala.sys.process._
* "find src -name *.scala -exec grep null {} ;" #| "xargs test -z" #&& "echo null-free" #|| "echo null detected" !
* }}}
- *
* Note that every `String` is implicitly converted into a `ProcessBuilder`
* through the implicits imported from [[scala.sys.process]]. These `ProcessBuilder` are then
* combined in three different ways.
*
* 1. `#|` pipes the output of the first command into the input of the second command. It
- * mirrors a shell pipe (`|`).
- * 2. `#&&` conditionally executes the second command if the previous one finished with
- * exit value 0. It mirrors shell's `&&`.
- * 3. `#||` conditionally executes the third command if the exit value of the previous
- * command is different than zero. It mirrors shell's `&&`.
- *
- * Not shown here, the equivalent of a shell's `;` would be `###`. The reason for this name is
- * that `;` is a reserved token in Scala.
- *
- * Finally, `!` at the end executes the commands, and returns the exit value. If the output
- * was desired instead, one could run that with `!!` instead.
- *
- * If one wishes to execute the commands in background, one can either call `run`, which
- * returns a [[scala.sys.process.Process]] from which the exit value can be obtained, or
- * `lines`, which returns a [scala.collection.immutable.Stream] of output lines. This throws
- * an exception at the end of the `Stream` is the exit value is non-zero. To avoid exceptions,
- * one can use `lines_!` instead.
- *
- * One can also start the commands in specific ways to further control their I/O. Using `!<` to
- * start the commands will use the stdin from the current process for them. All methods can
- * be used passing a [[scala.sys.process.ProcessLogger]] to capture the output, both stderr and
- * stdout. And, when using `run`, one can pass a [[scala.sys.process.ProcessIO]] to control
- * stdin, stdout and stderr.
- *
- * The stdin of a command can be redirected from a `java.io.InputStream`, a `java.io.File`, a
- * `java.net.URL` or another `ProcessBuilder` through the method `#<`. Likewise, the stdout
- * can be sent to a `java.io.OutputStream`, a `java.io.File` or another `ProcessBuilder` with
- * the method `#>`. The method `#>>` can be used to append the output to a `java.io.File`.
- * For example:
+ * mirrors a shell pipe (`|`).
+ * 1. `#&&` conditionally executes the second command if the previous one finished with
+ * exit value 0. It mirrors shell's `&&`.
+ * 1. `#||` conditionally executes the third command if the exit value of the previous
+ * command is different from zero. It mirrors shell's `||`.
*
- * {{{
- * new URL("http://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") !
- * }}}
+ * Finally, `!` at the end executes the commands, and returns the exit value.
+ * Whatever is printed will be sent to the Scala process standard output. If
+ * we wanted to capture it, we could run that with `!!` instead.
+ *
+ * Note: though it is not shown above, the equivalent of a shell's `;` would be
+ * `###`. The reason for this name is that `;` is a reserved token in Scala.
*/
trait ProcessBuilder extends Source with Sink {
- /** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is
- * sent to the console. If the exit code is non-zero, an exception is thrown.*/
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the output as a String. Standard error is sent to the console. If
+ * the exit code is non-zero, an exception is thrown.
+ */
def !! : String
- /** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is
- * sent to the provided ProcessLogger. If the exit code is non-zero, an exception is thrown.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the output as a String. Standard error is sent to the provided
+ * ProcessLogger. If the exit code is non-zero, an exception is thrown.
+ */
def !!(log: ProcessLogger): String
- /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
- * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value,
- * the Stream will provide all lines up to termination and then throw an exception. */
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the output as a String. Standard error is sent to the console. If
+ * the exit code is non-zero, an exception is thrown. The newly started
+ * process reads from standard input of the current process.
+ */
+ def !!< : String
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the output as a String. Standard error is sent to the provided
+ * ProcessLogger. If the exit code is non-zero, an exception is thrown. The
+ * newly started process reads from standard input of the current process.
+ */
+ def !!<(log: ProcessLogger): String
+
+ /** Starts the process represented by this builder. The output is returned as
+ * a Stream that blocks when lines are not available but the process has not
+ * completed. Standard error is sent to the console. If the process exits
+ * with a non-zero value, the Stream will provide all lines up to termination
+ * and then throw an exception.
+ */
def lines: Stream[String]
- /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
- * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value,
- * the Stream will provide all lines up to termination but will not throw an exception. */
+
+ /** Starts the process represented by this builder. The output is returned as
+ * a Stream that blocks when lines are not available but the process has not
+ * completed. Standard error is sent to the provided ProcessLogger. If the
+ * process exits with a non-zero value, the Stream will provide all lines up
+ * to termination but will not throw an exception.
+ */
def lines(log: ProcessLogger): Stream[String]
- /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
- * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value,
- * the Stream will provide all lines up to termination but will not throw an exception. */
+
+ /** Starts the process represented by this builder. The output is returned as
+ * a Stream that blocks when lines are not available but the process has not
+ * completed. Standard error is sent to the console. If the process exits
+ * with a non-zero value, the Stream will provide all lines up to termination
+ * but will not throw an exception.
+ */
def lines_! : Stream[String]
- /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
- * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value,
- * the Stream will provide all lines up to termination but will not throw an exception. */
+
+ /** Starts the process represented by this builder. The output is returned as
+ * a Stream that blocks when lines are not available but the process has not
+ * completed. Standard error is sent to the provided ProcessLogger. If the
+ * process exits with a non-zero value, the Stream will provide all lines up
+ * to termination but will not throw an exception.
+ */
def lines_!(log: ProcessLogger): Stream[String]
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the console.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the console.
+ */
def ! : Int
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the given ProcessLogger.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the given
+ * ProcessLogger.
+ */
def !(log: ProcessLogger): Int
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the console. The newly started process reads from standard input of the current process.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the console.
+ * The newly started process reads from standard input of the current process.
+ */
def !< : Int
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the given ProcessLogger. The newly started process reads from standard input of the current process.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the given
+ * ProcessLogger. The newly started process reads from standard input of the
+ * current process.
+ */
def !<(log: ProcessLogger): Int
- /** Starts the process represented by this builder. Standard output and error are sent to the console.*/
+
+ /** Starts the process represented by this builder. Standard output and error
+ * are sent to the console.
+ */
def run(): Process
- /** Starts the process represented by this builder. Standard output and error are sent to the given ProcessLogger.*/
+
+ /** Starts the process represented by this builder. Standard output and error
+ * are sent to the given ProcessLogger.
+ */
def run(log: ProcessLogger): Process
- /** Starts the process represented by this builder. I/O is handled by the given ProcessIO instance.*/
+
+ /** Starts the process represented by this builder. I/O is handled by the
+ * given ProcessIO instance.
+ */
def run(io: ProcessIO): Process
- /** Starts the process represented by this builder. Standard output and error are sent to the console.
- * The newly started process reads from standard input of the current process if `connectInput` is true.*/
+
+ /** Starts the process represented by this builder. Standard output and error
+ * are sent to the console. The newly started process reads from standard
+ * input of the current process if `connectInput` is true.
+ */
def run(connectInput: Boolean): Process
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the given ProcessLogger.
- * The newly started process reads from standard input of the current process if `connectInput` is true.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the given
+ * ProcessLogger. The newly started process reads from standard input of the
+ * current process if `connectInput` is true.
+ */
def run(log: ProcessLogger, connectInput: Boolean): Process
- /** Constructs a command that runs this command first and then `other` if this command succeeds.*/
+ /** Constructs a command that runs this command first and then `other` if this
+ * command succeeds.
+ */
def #&& (other: ProcessBuilder): ProcessBuilder
- /** Constructs a command that runs this command first and then `other` if this command does not succeed.*/
+
+ /** Constructs a command that runs this command first and then `other` if this
+ * command does not succeed.
+ */
def #|| (other: ProcessBuilder): ProcessBuilder
- /** Constructs a command that will run this command and pipes the output to `other`. `other` must be a simple command.*/
+
+ /** Constructs a command that runs this command and pipes its output to
+ * `other`. `other` must be a simple command.
+ */
def #| (other: ProcessBuilder): ProcessBuilder
- /** Constructs a command that will run this command and then `other`. The exit code will be the exit code of `other`.*/
+
+ /** Constructs a command that will run this command and then `other`. The
+ * exit code will be the exit code of `other`.
+ */
def ### (other: ProcessBuilder): ProcessBuilder
- /** True if this command can be the target of a pipe.
- */
+
+ /** True if this command can be the target of a pipe. */
def canPipeTo: Boolean
- /** True if this command has an exit code which should be propagated to the user.
- * Given a pipe between A and B, if B.hasExitValue is true then the exit code will
- * be the one from B; if it is false, the one from A. This exists to prevent output
- * redirections (implemented as pipes) from masking useful process error codes.
- */
+ /** True if this command has an exit code which should be propagated to the
+ * user. Given a pipe between A and B, if B.hasExitValue is true then the
+ * exit code will be the one from B; if it is false, the one from A. This
+ * exists to prevent output redirections (implemented as pipes) from masking
+ * useful process error codes.
+ */
def hasExitValue: Boolean
}
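
A sketch combining the operators and execution methods documented above; because the `#||` fallback guarantees a zero exit code, `!!` will not throw here:
{{{
import scala.sys.process._

// Pipe, conditional execution, and blocking capture of the output.
val report: String =
  ("ls" #| "grep .scala" #&& "echo scala sources found" #|| "echo none found").!!
}}}
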
diff --git a/src/library/scala/sys/process/ProcessIO.scala b/src/library/scala/sys/process/ProcessIO.scala
index 261e837a4d..fa0674670f 100644
--- a/src/library/scala/sys/process/ProcessIO.scala
+++ b/src/library/scala/sys/process/ProcessIO.scala
@@ -11,14 +11,40 @@ package process
import processInternal._
-/** This class is used to control the I/O of every [[scala.sys.process.ProcessBuilder]].
- * Most of the time, there is no need to interact with `ProcessIO` directly. However, if
- * fine control over the I/O of a `ProcessBuilder` is desired, one can use the factories
- * on [[scala.sys.process.BasicIO]] stand-alone object to create one.
- *
- * Each method will be called in a separate thread.
- * If daemonizeThreads is true, they will all be marked daemon threads.
- */
+/** This class is used to control the I/O of every
+ * [[scala.sys.process.Process]]. The functions used to create it will be
+ * called with the process streams once it has been started. It might not be
+ * necessary to use `ProcessIO` directly --
+ * [[scala.sys.process.ProcessBuilder]] can return the process output to the
+ * caller, or use a [[scala.sys.process.ProcessLogger]] which avoids direct
+ * interaction with a stream. One can even use the factories at `BasicIO` to
+ * create a `ProcessIO`, or use its helper methods when creating one's own
+ * `ProcessIO`.
+ *
+ * When creating a `ProcessIO`, it is important to ''close all streams'' when
+ * finished, since the JVM might use system resources to capture the process
+ * input and output, and will not release them unless the streams are
+ * explicitly closed.
+ *
+ * `ProcessBuilder` will call `writeInput`, `processOutput` and `processError`
+ * in separate threads, and if daemonizeThreads is true, they will all be
+ * marked as daemon threads.
+ *
+ * @param writeInput Function that will be called with the `OutputStream` to
+ * which all input to the process must be written. This will
+ * be called in a newly spawned thread.
+ * @param processOutput Function that will be called with the `InputStream`
+ * from which all normal output of the process must be
+ * read. This will be called in a newly spawned
+ * thread.
+ * @param processError Function that will be called with the `InputStream` from
+ * which all error output of the process must be read.
+ * This will be called in a newly spawned thread.
+ * @param daemonizeThreads Indicates whether the newly spawned threads that
+ * will run `processOutput`, `processError` and
+ * `writeInput` should be marked as daemon threads.
+ * @note Failure to close the passed streams may result in resource leakage.
+ */
final class ProcessIO(
val writeInput: OutputStream => Unit,
val processOutput: InputStream => Unit,
@@ -27,8 +53,15 @@ final class ProcessIO(
) {
def this(in: OutputStream => Unit, out: InputStream => Unit, err: InputStream => Unit) = this(in, out, err, false)
+ /** Creates a new `ProcessIO` with a different handler for the process input. */
def withInput(write: OutputStream => Unit): ProcessIO = new ProcessIO(write, processOutput, processError, daemonizeThreads)
+
+ /** Creates a new `ProcessIO` with a different handler for the normal output. */
def withOutput(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, process, processError, daemonizeThreads)
+
+ /** Creates a new `ProcessIO` with a different handler for the error output. */
def withError(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, processOutput, process, daemonizeThreads)
+
+ /** Creates a new `ProcessIO`, with `daemonizeThreads` true. */
def daemonized(): ProcessIO = new ProcessIO(writeInput, processOutput, processError, true)
}
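
A sketch of a hand-built `ProcessIO` following the note above about closing streams; `countBytes` and `byteCount` are illustrative names:
{{{
import java.io.InputStream
import scala.sys.process._

// Provide no input, count the bytes of normal output, and send error
// output to stderr; each handler closes its stream when done.
var byteCount = 0
def countBytes(in: InputStream): Unit = {
  while (in.read() != -1) byteCount += 1
  in.close()
}
val code = "ls".run(new ProcessIO(_.close(), countBytes, BasicIO.toStdErr)).exitValue()
}}}
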
diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala
index 67146dd70e..a8241db53c 100644
--- a/src/library/scala/sys/process/ProcessLogger.scala
+++ b/src/library/scala/sys/process/ProcessLogger.scala
@@ -11,12 +11,26 @@ package process
import java.io._
-/** Encapsulates the output and error streams of a running process.
- * Many of the methods of `ProcessBuilder` accept a `ProcessLogger` as
- * an argument.
- *
- * @see [[scala.sys.process.ProcessBuilder]]
- */
+/** Encapsulates the output and error streams of a running process. This is used
+ * by [[scala.sys.process.ProcessBuilder]] when starting a process, as an
+ * alternative to [[scala.sys.process.ProcessIO]], which can be more difficult
+ * to use. Note that a `ProcessLogger` will be used to create a `ProcessIO`
+ * anyway. The object `BasicIO` has some functions to do that.
+ *
+ * Here is an example that counts the number of lines in the normal and error
+ * output of a process:
+ * {{{
+ * import scala.sys.process._
+ *
+ * var normalLines = 0
+ * var errorLines = 0
+ * val countLogger = ProcessLogger(line => normalLines += 1,
+ * line => errorLines += 1)
+ * "find /etc" ! countLogger
+ * }}}
+ *
+ * @see [[scala.sys.process.ProcessBuilder]]
+ */
trait ProcessLogger {
/** Will be called with each line read from the process output stream.
*/
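
A variation on the counting example above, splitting normal and error output into separate buffers via the two-function factory:
{{{
import scala.sys.process._

// Accumulate stdout and stderr lines separately.
val out  = new StringBuilder
val err  = new StringBuilder
val exit = "find /etc" ! ProcessLogger(out append _, err append _)
}}}
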
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index 3eb0e5bb89..c1bf470831 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -11,40 +11,175 @@
// for process debugging output.
//
package scala.sys {
- /**
- * This package is used to create process pipelines, similar to Unix command pipelines.
+ /** This package handles the execution of external processes. The contents of
+ * this package can be divided into three groups, according to their
+ * responsibilities:
*
- * The key concept is that one builds a [[scala.sys.process.Process]] that will run and return an exit
- * value. This `Process` is usually composed of one or more [[scala.sys.process.ProcessBuilder]], fed by a
- * [[scala.sys.process.ProcessBuilder.Source]] and feeding a [[scala.sys.process.ProcessBuilder.Sink]]. A
- * `ProcessBuilder` itself is both a `Source` and a `Sink`.
+ * - Indicating what to run and how to run it.
+ * - Handling a process input and output.
+ * - Running the process.
*
- * As `ProcessBuilder`, `Sink` and `Source` are abstract, one usually creates them with `apply` methods on
- * the companion object of [[scala.sys.process.Process]], or through implicit conversions available in this
- * package object from `String` and other types. The pipe is composed through unix-like pipeline and I/O
- * redirection operators available on [[scala.sys.process.ProcessBuilder]].
+ * For simple uses, the only group that matters is the first one. Running an
+ * external command can be as simple as `"ls".!`, or as complex as building a
+ * pipeline of commands such as this:
*
- * The example below shows how to build and combine such commands. It searches for `null` uses in the `src`
- * directory, printing a message indicating whether they were found or not. The first command pipes its
- * output to the second command, whose exit value is then used to choose between the third or fourth
- * commands. This same example is explained in greater detail on [[scala.sys.process.ProcessBuilder]].
+ * {{{
+ * import scala.sys.process._
+ * "ls" #| "grep .scala" #&& "scalac *.scala" #|| "echo nothing found" lines
+ * }}}
+ *
+ * We describe below the general concepts and architecture of the package,
+ * and then take a closer look at each of the categories mentioned above.
+ *
+ * ==Concepts and Architecture==
+ *
+ * The underlying basis for the whole package is Java's `Process` and
+ * `ProcessBuilder` classes. While there's no need to use these Java classes,
+ * they impose boundaries on what is possible. One cannot, for instance,
+ * retrieve a ''process id'' for whatever is executing.
+ *
+ * When executing an external process, one can provide a command's name,
+ * arguments to it, the directory in which it will be executed and what
+ * environment variables will be set. For each executing process, one can
+ * feed its standard input through a `java.io.OutputStream`, and read from
+ * its standard output and standard error through a pair of
+ * `java.io.InputStream`. One can wait until a process finishes execution and
+ * then retrieve its return value, or one can kill an executing process.
+ * Everything else must be built on those features.
+ *
+ * This package provides a DSL for running and chaining such processes,
+ * mimicking Unix shells' ability to pipe output from one process to the input
+ * of another, or control the execution of further processes based on the
+ * return status of the previous one.
+ *
+ * In addition to this DSL, this package also provides a few ways of
+ * controlling input and output of these processes, going from simple and
+ * easy to use to complex and flexible.
*
+ * When processes are composed, a new `ProcessBuilder` is created which, when
+ * run, will execute the `ProcessBuilder` instances it is composed of
+ * according to the manner of the composition. If piping one process to
+ * another, they'll be executed simultaneously, and each will be passed a
+ * `ProcessIO` that will copy the output of one to the input of the other.
+ *
+ * ==What to Run and How==
+ *
+ * The central component of the process execution DSL is the
+ * [[scala.sys.process.ProcessBuilder]] trait. It is `ProcessBuilder` that
+ * implements the process execution DSL, that creates the
+ * [[scala.sys.process.Process]] that will handle the execution, and returns
+ * the results of such execution to the caller. We can see that DSL in the
+ * introductory example: `#|`, `#&&` and `#||` are methods on
+ * `ProcessBuilder` used to create a new `ProcessBuilder` through
+ * composition.
+ *
+ * One creates a `ProcessBuilder` either through factories on the
+ * [[scala.sys.process.Process]]'s companion object, or through implicit
+ * conversions available in this package object itself. Implicitly, each
+ * process is created either out of a `String`, with arguments separated by
+ * spaces -- no escaping of spaces is possible -- or out of a
+ * [[scala.collection.Seq]], where the first element represents the command
+ * name, and the remaining elements are arguments to it. In this latter case,
+ * arguments may contain spaces. One can also implicitly convert
+ * [[scala.xml.Elem]] and `java.lang.ProcessBuilder` into a `ProcessBuilder`.
+ * In the introductory example, the strings were converted into
+ * `ProcessBuilder` implicitly.
+ *
+ * To further control how the process will be run, such as specifying
+ * the directory in which it will be run, see the factories on
+ * [[scala.sys.process.Process]]'s object companion.
+ *
+ * Once the desired `ProcessBuilder` is available, it can be executed in
+ * different ways, depending on how one desires to control its I/O, and what
+ * kind of result one wishes for:
+ *
+ * - Return status of the process (`!` methods)
+ * - Output of the process as a `String` (`!!` methods)
+ * - Continuous output of the process as a `Stream[String]` (`lines` methods)
+ * - The `Process` representing it (`run` methods)
+ *
+ * Some simple examples of these methods:
* {{{
* import scala.sys.process._
- * (
- * "find src -name *.scala -exec grep null {} ;"
- * #| "xargs test -z"
- * #&& "echo null-free" #|| "echo null detected"
- * ) !
+ *
+ * // This uses ! to get the exit code
+ * def fileExists(name: String) = Seq("test", "-f", name).! == 0
+ *
+ * // This uses !! to get the whole result as a string
+ * val dirContents = "ls".!!
+ *
+ * // This "fire-and-forgets" the method, which can be lazily read through
+ * // a Stream[String]
+ * def sourceFilesAt(baseDir: String): Stream[String] = {
+ * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f")
+ * cmd.lines
+ * }
* }}}
*
- * Other implicits available here are for [[scala.sys.process.ProcessBuilder.FileBuilder]], which extends
- * both `Sink` and `Source`, and for [[scala.sys.process.ProcessBuilder.URLBuilder]], which extends
- * `Source` alone.
+ * We'll see more details about controlling I/O of the process in the next
+ * section.
+ *
+ * ==Handling Input and Output==
+ *
+ * In the underlying Java model, once a `Process` has been started, one can
+ * get `java.io.InputStream` and `java.io.OutputStream` representing its
+ * output and input respectively. That is, what one writes to an
+ * `OutputStream` is turned into input to the process, and the output of a
+ * process can be read from an `InputStream` -- of which there are two, one
+ * representing normal output, and the other representing error output.
+ *
+ * This model creates a difficulty, which is that the code responsible for
+ * actually running the external processes is the one that has to take
+ * decisions about how to handle its I/O.
+ *
+ * This package presents an alternative model: the I/O of a running process
+ * is controlled by a [[scala.sys.process.ProcessIO]] object, which can be
+ * passed ''to'' the code that runs the external process. A `ProcessIO` will
+ * have direct access to the java streams associated with the process I/O. It
+ * must, however, close these streams afterwards.
+ *
+ * Simpler abstractions are available, however. The components of this
+ * package that handle I/O are:
+ *
+ * - [[scala.sys.process.ProcessIO]]: provides the low level abstraction.
+ * - [[scala.sys.process.ProcessLogger]]: provides a higher level abstraction
+ * for output, and can be created through its object companion.
+ * - [[scala.sys.process.BasicIO]]: a library of helper methods for the
+ * creation of `ProcessIO`.
+ * - This package object itself, with a few implicit conversions.
*
- * One can even create a `Process` solely out of these, without running any command. For example, this will
- * download from a URL to a file:
+ * Some examples of I/O handling:
+ * {{{
+ * import scala.sys.process._
+ *
+ * // An overly complex way of computing the size of a compressed file
+ * def gzFileSize(name: String) = {
+ * val cat = Seq("zcat", name)
+ * var count = 0
+ * def byteCounter(input: java.io.InputStream) = {
+ * while(input.read() != -1) count += 1
+ * input.close()
+ * }
+ * cat ! new ProcessIO(_.close(), byteCounter, _.close())
+ * count
+ * }
+ *
+ * // This "fire-and-forgets" the method, which can be lazily read through
+ * // a Stream[String], and accumulates all errors on a StringBuffer
+ * def sourceFilesAt(baseDir: String): (Stream[String], StringBuffer) = {
+ * val buffer = new StringBuffer()
+ * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f")
+ * val lines = cmd lines_! ProcessLogger(buffer append _)
+ * (lines, buffer)
+ * }
+ * }}}
*
+ * Instances of the java classes `java.io.File` and `java.net.URL` can both
+ * be used directly as input to other processes, and `java.io.File` can be
+ * used as output as well. One can even pipe one to the other directly
+ * without any intervening process, though that's not a design goal or
+ * recommended usage. For example, the following code will copy a web page to
+ * a file:
* {{{
* import java.io.File
* import java.net.URL
@@ -52,26 +187,33 @@ package scala.sys {
* new URL("http://www.scala-lang.org/") #> new File("scala-lang.html") !
* }}}
*
- * One may use a `Process` directly through `ProcessBuilder`'s `run` method, which starts the process in
- * the background, and returns a `Process`. If background execution is not desired, one can get a
- * `ProcessBuilder` to execute through a method such as `!`, `lines`, `run` or variations thereof. That
- * will create the `Process` to execute the commands, and return either the exit value or the output, maybe
- * throwing an exception.
- *
- * Finally, when executing a `ProcessBuilder`, one may pass a [[scala.sys.process.ProcessLogger]] to
- * capture stdout and stderr of the executing processes. A `ProcessLogger` may be created through its
- * companion object from functions of type `(String) => Unit`, or one might redirect it to a file, using
- * [[scala.sys.process.FileProcessLogger]], which can also be created through `ProcessLogger`'s object
- * companion.
+ * More information about the other ways of controlling I/O can be found
+ * in the scaladoc for the associated objects, traits and classes.
+ *
+ * ==Running the Process==
+ *
+ * Paradoxically, this is the simplest component of all, and the one least
+ * likely to be interacted with. It consists solely of
+ * [[scala.sys.process.Process]], and it provides only two methods:
+ *
+ * - `exitValue()`: blocks until the process exits, and then returns the exit
+ * value. This is what happens when one uses the `!` method of
+ * `ProcessBuilder`.
+ * - `destroy()`: this will kill the external process and close the streams
+ * associated with it.
*/
package object process extends ProcessImplicits {
+ /** The arguments passed to `java` when creating this process */
def javaVmArguments: List[String] = {
import collection.JavaConversions._
java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toList
}
+ /** The input stream of this process */
def stdin = java.lang.System.in
+ /** The output stream of this process */
def stdout = java.lang.System.out
+ /** The error stream of this process */
def stderr = java.lang.System.err
}
// private val shell: String => Array[String] =
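
As a sketch of the definitions above, the current process's own streams can be combined with the redirection operators; this assumes the `#<` overload for `java.io.InputStream` mentioned in the earlier documentation:
{{{
import scala.sys.process._

// Feed this process's stdin to "wc -l" and capture the count.
val lineCount = ("wc -l" #< stdin).!!.trim
}}}
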
diff --git a/src/library/scala/util/Duration.scala b/src/library/scala/util/Duration.scala
new file mode 100644
index 0000000000..4c118f8b3b
--- /dev/null
+++ b/src/library/scala/util/Duration.scala
@@ -0,0 +1,485 @@
+/**
+ * Copyright (C) 2009-2011 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.util
+
+import java.util.concurrent.TimeUnit
+import TimeUnit._
+import java.lang.{ Long ⇒ JLong, Double ⇒ JDouble }
+//import akka.actor.ActorSystem (commented methods)
+
+class TimerException(message: String) extends RuntimeException(message)
+
+/**
+ * Simple timer class.
+ * Usage:
+ * <pre>
+ * import akka.util.duration._
+ * import akka.util.Timer
+ *
+ * val timer = Timer(30.seconds)
+ * while (timer.isTicking) { ... }
+ * </pre>
+ */
+case class Timer(duration: Duration, throwExceptionOnTimeout: Boolean = false) {
+ val startTimeInMillis = System.currentTimeMillis
+ val timeoutInMillis = duration.toMillis
+
+ /**
+ * Returns true while the timer is ticking. After that it either throws an exception or
+ * returns false, depending on whether the 'throwExceptionOnTimeout' argument is true or false.
+ */
+ def isTicking: Boolean = {
+ if (!(timeoutInMillis > (System.currentTimeMillis - startTimeInMillis))) {
+ if (throwExceptionOnTimeout) throw new TimerException("Time out after " + duration)
+ else false
+ } else true
+ }
+}
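
A sketch of `Timer` without the DSL imports shown in the comment above, constructing the `Duration` explicitly:
{{{
import java.util.concurrent.TimeUnit
import scala.util.{ Duration, Timer }

// Poll the timer until 30 seconds have elapsed.
val timer = Timer(Duration(30, TimeUnit.SECONDS))
while (timer.isTicking) {
  // do work
}
}}}
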
+
+object Duration {
+ def apply(length: Long, unit: TimeUnit): Duration = new FiniteDuration(length, unit)
+ def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
+ def apply(length: Long, unit: String): Duration = new FiniteDuration(length, timeUnit(unit))
+
+ def fromNanos(nanos: Long): Duration = {
+ if (nanos % 86400000000000L == 0) {
+ Duration(nanos / 86400000000000L, DAYS)
+ } else if (nanos % 3600000000000L == 0) {
+ Duration(nanos / 3600000000000L, HOURS)
+ } else if (nanos % 60000000000L == 0) {
+ Duration(nanos / 60000000000L, MINUTES)
+ } else if (nanos % 1000000000L == 0) {
+ Duration(nanos / 1000000000L, SECONDS)
+ } else if (nanos % 1000000L == 0) {
+ Duration(nanos / 1000000L, MILLISECONDS)
+ } else if (nanos % 1000L == 0) {
+ Duration(nanos / 1000L, MICROSECONDS)
+ } else {
+ Duration(nanos, NANOSECONDS)
+ }
+ }
+
+ def fromNanos(nanos: Double): Duration = fromNanos((nanos + 0.5).asInstanceOf[Long])
+
+ /**
+ * Construct a Duration by parsing a String. In case of a format error, a
+ * RuntimeException is thrown. See `unapply(String)` for more information.
+ */
+ def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error")
+
+ /**
+ * Deconstruct a Duration into length and unit if it is finite.
+ */
+ def unapply(d: Duration): Option[(Long, TimeUnit)] = {
+ if (d.finite_?) {
+ Some((d.length, d.unit))
+ } else {
+ None
+ }
+ }
+
+ private val RE = ("""^\s*(\d+(?:\.\d+)?)\s*""" + // length part
+ "(?:" + // units are distinguished in separate match groups
+ "(d|day|days)|" +
+ "(h|hour|hours)|" +
+ "(min|minute|minutes)|" +
+ "(s|sec|second|seconds)|" +
+ "(ms|milli|millis|millisecond|milliseconds)|" +
+ "(µs|micro|micros|microsecond|microseconds)|" +
+ "(ns|nano|nanos|nanosecond|nanoseconds)" +
+ """)\s*$""").r // close the non-capturing group
+ private val REinf = """^\s*Inf\s*$""".r
+ private val REminf = """^\s*(?:-\s*|Minus)Inf\s*""".r
+
+ /**
+ * Parse String, return None if no match. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"` and `"-Inf"` or `"MinusInf"`.
+ */
+ def unapply(s: String): Option[Duration] = s match {
+ case RE(length, d, h, m, s, ms, mus, ns) ⇒
+ if (d ne null) Some(Duration(JDouble.parseDouble(length), DAYS)) else if (h ne null) Some(Duration(JDouble.parseDouble(length), HOURS)) else if (m ne null) Some(Duration(JDouble.parseDouble(length), MINUTES)) else if (s ne null) Some(Duration(JDouble.parseDouble(length), SECONDS)) else if (ms ne null) Some(Duration(JDouble.parseDouble(length), MILLISECONDS)) else if (mus ne null) Some(Duration(JDouble.parseDouble(length), MICROSECONDS)) else if (ns ne null) Some(Duration(JDouble.parseDouble(length), NANOSECONDS)) else
+ sys.error("made some error in regex (should not be possible)")
+ case REinf() ⇒ Some(Inf)
+ case REminf() ⇒ Some(MinusInf)
+ case _ ⇒ None
+ }
+
+ /**
+ * Parse TimeUnit from string representation.
+ */
+ def timeUnit(unit: String) = unit.toLowerCase match {
+ case "d" | "day" | "days" ⇒ DAYS
+ case "h" | "hour" | "hours" ⇒ HOURS
+ case "min" | "minute" | "minutes" ⇒ MINUTES
+ case "s" | "sec" | "second" | "seconds" ⇒ SECONDS
+ case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ MILLISECONDS
+ case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ MICROSECONDS
+ case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ NANOSECONDS
+ }
+
+ val Zero: Duration = new FiniteDuration(0, NANOSECONDS)
+ val Undefined: Duration = new Duration with Infinite {
+ override def toString = "Duration.Undefined"
+ override def equals(other: Any) = other.asInstanceOf[AnyRef] eq this
+ override def +(other: Duration): Duration = throw new IllegalArgumentException("cannot add Undefined duration")
+ override def -(other: Duration): Duration = throw new IllegalArgumentException("cannot subtract Undefined duration")
+ override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration")
+ override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration")
+ override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration")
+ def >(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
+ def >=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
+ def <(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
+ def <=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
+ def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration")
+ }
+
+ trait Infinite {
+ this: Duration ⇒
+
+ override def equals(other: Any) = false
+
+ def +(other: Duration): Duration =
+ other match {
+ case _: this.type ⇒ this
+ case _: Infinite ⇒ throw new IllegalArgumentException("illegal addition of infinities")
+ case _ ⇒ this
+ }
+ def -(other: Duration): Duration =
+ other match {
+ case _: this.type ⇒ throw new IllegalArgumentException("illegal subtraction of infinities")
+ case _ ⇒ this
+ }
+ def *(factor: Double): Duration = this
+ def /(factor: Double): Duration = this
+ def /(other: Duration): Double =
+ other match {
+ case _: Infinite ⇒ throw new IllegalArgumentException("illegal division of infinities")
+ // maybe questionable but pragmatic: Inf / 0 => Inf
+ case x ⇒ Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
+ }
+
+ def finite_? = false
+
+ def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations")
+ def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations")
+ def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations")
+ def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations")
+ def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations")
+ def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations")
+ def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations")
+ def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations")
+ def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations")
+ def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations")
+
+ def printHMS = toString
+ }
+
+ /**
+ * Infinite duration: greater than any other and not equal to any other,
+ * including itself.
+ */
+ val Inf: Duration = new Duration with Infinite {
+ override def toString = "Duration.Inf"
+ def >(other: Duration) = true
+ def >=(other: Duration) = true
+ def <(other: Duration) = false
+ def <=(other: Duration) = false
+ def unary_- : Duration = MinusInf
+ }
+
+ /**
+ * Infinite negative duration: less than any other and not equal to any other,
+ * including itself.
+ */
+ val MinusInf: Duration = new Duration with Infinite {
+ override def toString = "Duration.MinusInf"
+ def >(other: Duration) = false
+ def >=(other: Duration) = false
+ def <(other: Duration) = true
+ def <=(other: Duration) = true
+ def unary_- : Duration = Inf
+ }
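+
+ // A sketch of the resulting algebra (illustrative, not part of the original
+ // patch): infinities absorb finite operands and reject conflicting infinities.
+ //   Inf + Duration(1, SECONDS)  // returns the Inf singleton (eq Inf)
+ //   Inf + MinusInf              // throws IllegalArgumentException
+ //   Inf - Inf                   // throws IllegalArgumentException
+ //   Inf == Inf                  // false: an infinity equals nothing, not even itself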
+
+ // Java Factories
+ def create(length: Long, unit: TimeUnit): Duration = apply(length, unit)
+ def create(length: Double, unit: TimeUnit): Duration = apply(length, unit)
+ def create(length: Long, unit: String): Duration = apply(length, unit)
+ def parse(s: String): Duration = unapply(s).get
+}
+
+/**
+ * Utility for working with java.util.concurrent.TimeUnit durations.
+ *
+ * <p/>
+ * Examples of usage from Java:
+ * <pre>
+ * import scala.util.FiniteDuration;
+ * import java.util.concurrent.TimeUnit;
+ * import static java.util.concurrent.TimeUnit.MILLISECONDS;
+ *
+ * Duration duration = new FiniteDuration(100, MILLISECONDS);
+ * Duration duration = new FiniteDuration(5, "seconds");
+ *
+ * duration.toNanos();
+ * </pre>
+ *
+ * <p/>
+ * Examples of usage from Scala:
+ * <pre>
+ * import scala.util.Duration
+ * import java.util.concurrent.TimeUnit
+ *
+ * val duration = Duration(100, MILLISECONDS)
+ * val duration = Duration(100, "millis")
+ *
+ * duration.toNanos
+ * duration < 1.second
+ * duration <= Duration.Inf
+ * </pre>
+ *
+ * <p/>
+ * Implicits are also provided for Int, Long and Double. Example usage:
+ * <pre>
+ * import scala.util.duration._
+ *
+ * val duration = 100 millis
+ * </pre>
+ *
+ * Extractors, parsing and arithmetic are also included:
+ * <pre>
+ * val d = Duration("1.2 µs")
+ * val Duration(length, unit) = 5 millis
+ * val d2 = d * 2.5
+ * val d3 = d2 + 1.millisecond
+ * </pre>
+ */
+abstract class Duration extends Serializable {
+ def length: Long
+ def unit: TimeUnit
+ def toNanos: Long
+ def toMicros: Long
+ def toMillis: Long
+ def toSeconds: Long
+ def toMinutes: Long
+ def toHours: Long
+ def toDays: Long
+ def toUnit(unit: TimeUnit): Double
+ def printHMS: String
+ def <(other: Duration): Boolean
+ def <=(other: Duration): Boolean
+ def >(other: Duration): Boolean
+ def >=(other: Duration): Boolean
+ def +(other: Duration): Duration
+ def -(other: Duration): Duration
+ def *(factor: Double): Duration
+ def /(factor: Double): Duration
+ def /(other: Duration): Double
+ def unary_- : Duration
+ def finite_? : Boolean
+// def dilated(implicit system: ActorSystem): Duration = this * system.settings.TestTimeFactor
+ def min(other: Duration): Duration = if (this < other) this else other
+ def max(other: Duration): Duration = if (this > other) this else other
+ def sleep(): Unit = Thread.sleep(toMillis)
+
+ // Java API
+ def lt(other: Duration) = this < other
+ def lteq(other: Duration) = this <= other
+ def gt(other: Duration) = this > other
+ def gteq(other: Duration) = this >= other
+ def plus(other: Duration) = this + other
+ def minus(other: Duration) = this - other
+ def mul(factor: Double) = this * factor
+ def div(factor: Double) = this / factor
+ def div(other: Duration) = this / other
+ def neg() = -this
+ def isFinite() = finite_?
+}
+
+class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
+ import Duration._
+
+ def this(length: Long, unit: String) = this(length, Duration.timeUnit(unit))
+
+ def toNanos = unit.toNanos(length)
+ def toMicros = unit.toMicros(length)
+ def toMillis = unit.toMillis(length)
+ def toSeconds = unit.toSeconds(length)
+ def toMinutes = unit.toMinutes(length)
+ def toHours = unit.toHours(length)
+ def toDays = unit.toDays(length)
+ def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u)
+
+ override def toString = this match {
+ case Duration(1, DAYS) ⇒ "1 day"
+ case Duration(x, DAYS) ⇒ x + " days"
+ case Duration(1, HOURS) ⇒ "1 hour"
+ case Duration(x, HOURS) ⇒ x + " hours"
+ case Duration(1, MINUTES) ⇒ "1 minute"
+ case Duration(x, MINUTES) ⇒ x + " minutes"
+ case Duration(1, SECONDS) ⇒ "1 second"
+ case Duration(x, SECONDS) ⇒ x + " seconds"
+ case Duration(1, MILLISECONDS) ⇒ "1 millisecond"
+ case Duration(x, MILLISECONDS) ⇒ x + " milliseconds"
+ case Duration(1, MICROSECONDS) ⇒ "1 microsecond"
+ case Duration(x, MICROSECONDS) ⇒ x + " microseconds"
+ case Duration(1, NANOSECONDS) ⇒ "1 nanosecond"
+ case Duration(x, NANOSECONDS) ⇒ x + " nanoseconds"
+ }
+
+ def printHMS = "%02d:%02d:%06.3f".format(toHours, toMinutes % 60, toMillis / 1000.0 % 60)
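+
+ // For example (illustrative; assumes an English-style locale for the decimal
+ // separator): new FiniteDuration(3725, SECONDS).printHMS yields "01:02:05.000".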
+
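+ // The comparisons below delegate to the flipped comparison on the infinite
+ // operand whenever `other` is not finite, so Inf and MinusInf decide the result.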
+ def <(other: Duration) = {
+ if (other.finite_?) {
+ toNanos < other.asInstanceOf[FiniteDuration].toNanos
+ } else {
+ other > this
+ }
+ }
+
+ def <=(other: Duration) = {
+ if (other.finite_?) {
+ toNanos <= other.asInstanceOf[FiniteDuration].toNanos
+ } else {
+ other >= this
+ }
+ }
+
+ def >(other: Duration) = {
+ if (other.finite_?) {
+ toNanos > other.asInstanceOf[FiniteDuration].toNanos
+ } else {
+ other < this
+ }
+ }
+
+ def >=(other: Duration) = {
+ if (other.finite_?) {
+ toNanos >= other.asInstanceOf[FiniteDuration].toNanos
+ } else {
+ other <= this
+ }
+ }
+
+ def +(other: Duration) = {
+ if (!other.finite_?) {
+ other
+ } else {
+ val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos
+ fromNanos(nanos)
+ }
+ }
+
+ def -(other: Duration) = {
+ if (!other.finite_?) {
+ other
+ } else {
+ val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos
+ fromNanos(nanos)
+ }
+ }
+
+ def *(factor: Double) = fromNanos(long2double(toNanos) * factor)
+
+ def /(factor: Double) = fromNanos(long2double(toNanos) / factor)
+
+ def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0.0
+
+ def unary_- = Duration(-length, unit)
+
+ def finite_? = true
+
+ override def equals(other: Any) =
+ other.isInstanceOf[FiniteDuration] &&
+ toNanos == other.asInstanceOf[FiniteDuration].toNanos
+
+ override def hashCode = toNanos.toInt
+}
+
+class DurationInt(n: Int) {
+ def nanoseconds = Duration(n, NANOSECONDS)
+ def nanos = Duration(n, NANOSECONDS)
+ def nanosecond = Duration(n, NANOSECONDS)
+ def nano = Duration(n, NANOSECONDS)
+
+ def microseconds = Duration(n, MICROSECONDS)
+ def micros = Duration(n, MICROSECONDS)
+ def microsecond = Duration(n, MICROSECONDS)
+ def micro = Duration(n, MICROSECONDS)
+
+ def milliseconds = Duration(n, MILLISECONDS)
+ def millis = Duration(n, MILLISECONDS)
+ def millisecond = Duration(n, MILLISECONDS)
+ def milli = Duration(n, MILLISECONDS)
+
+ def seconds = Duration(n, SECONDS)
+ def second = Duration(n, SECONDS)
+
+ def minutes = Duration(n, MINUTES)
+ def minute = Duration(n, MINUTES)
+
+ def hours = Duration(n, HOURS)
+ def hour = Duration(n, HOURS)
+
+ def days = Duration(n, DAYS)
+ def day = Duration(n, DAYS)
+}
+
+class DurationLong(n: Long) {
+ def nanoseconds = Duration(n, NANOSECONDS)
+ def nanos = Duration(n, NANOSECONDS)
+ def nanosecond = Duration(n, NANOSECONDS)
+ def nano = Duration(n, NANOSECONDS)
+
+ def microseconds = Duration(n, MICROSECONDS)
+ def micros = Duration(n, MICROSECONDS)
+ def microsecond = Duration(n, MICROSECONDS)
+ def micro = Duration(n, MICROSECONDS)
+
+ def milliseconds = Duration(n, MILLISECONDS)
+ def millis = Duration(n, MILLISECONDS)
+ def millisecond = Duration(n, MILLISECONDS)
+ def milli = Duration(n, MILLISECONDS)
+
+ def seconds = Duration(n, SECONDS)
+ def second = Duration(n, SECONDS)
+
+ def minutes = Duration(n, MINUTES)
+ def minute = Duration(n, MINUTES)
+
+ def hours = Duration(n, HOURS)
+ def hour = Duration(n, HOURS)
+
+ def days = Duration(n, DAYS)
+ def day = Duration(n, DAYS)
+}
+
+class DurationDouble(d: Double) {
+ def nanoseconds = Duration(d, NANOSECONDS)
+ def nanos = Duration(d, NANOSECONDS)
+ def nanosecond = Duration(d, NANOSECONDS)
+ def nano = Duration(d, NANOSECONDS)
+
+ def microseconds = Duration(d, MICROSECONDS)
+ def micros = Duration(d, MICROSECONDS)
+ def microsecond = Duration(d, MICROSECONDS)
+ def micro = Duration(d, MICROSECONDS)
+
+ def milliseconds = Duration(d, MILLISECONDS)
+ def millis = Duration(d, MILLISECONDS)
+ def millisecond = Duration(d, MILLISECONDS)
+ def milli = Duration(d, MILLISECONDS)
+
+ def seconds = Duration(d, SECONDS)
+ def second = Duration(d, SECONDS)
+
+ def minutes = Duration(d, MINUTES)
+ def minute = Duration(d, MINUTES)
+
+ def hours = Duration(d, HOURS)
+ def hour = Duration(d, HOURS)
+
+ def days = Duration(d, DAYS)
+ def day = Duration(d, DAYS)
+}
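+
+// Illustrative usage of the wrapper classes above (the implicit conversions
+// that enable `100 millis` live in a `duration` package object that is not
+// part of this hunk; constructing the wrappers explicitly is equivalent):
+//   new DurationInt(100).millis     // Duration(100, MILLISECONDS)
+//   new DurationDouble(1.5).minutes // Duration(1.5, MINUTES)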
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index df1c68ced4..0c7772cd07 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -142,7 +142,7 @@ private[scala] trait PropertiesTrait {
*/
def isWin = osName startsWith "Windows"
def isMac = javaVendor startsWith "Apple"
-
+
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
// and finally the system property based javaHome.
diff --git a/src/library/scala/util/Timeout.scala b/src/library/scala/util/Timeout.scala
new file mode 100644
index 0000000000..0190675344
--- /dev/null
+++ b/src/library/scala/util/Timeout.scala
@@ -0,0 +1,33 @@
+/**
+ * Copyright (C) 2009-2011 Typesafe Inc. <http://www.typesafe.com>
+ */
+package scala.util
+
+import java.util.concurrent.TimeUnit
+
+case class Timeout(duration: Duration) {
+ def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS))
+ def this(length: Long, unit: TimeUnit) = this(Duration(length, unit))
+}
+
+object Timeout {
+ /**
+ * A timeout with zero duration; most requests will time out immediately.
+ */
+ val zero = new Timeout(Duration.Zero)
+
+ /**
+ * A Timeout with infinite duration, which never times out. Use extreme caution
+ * with this: it may cause memory leaks or blocked threads, and it may not even
+ * be supported by the receiver, in which case an exception is thrown.
+ */
+ val never = new Timeout(Duration.Inf)
+
+ def apply(timeout: Long) = new Timeout(timeout)
+ def apply(length: Long, unit: TimeUnit) = new Timeout(length, unit)
+
+ implicit def durationToTimeout(duration: Duration) = new Timeout(duration)
+ implicit def intToTimeout(timeout: Int) = new Timeout(timeout)
+ implicit def longToTimeout(timeout: Long) = new Timeout(timeout)
+ //implicit def defaultTimeout(implicit system: ActorSystem) = system.settings.ActorTimeout (have to introduce this in ActorSystem)
+}
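+
+// Illustrative usage (not part of the original patch):
+//   Timeout(5000)                           // 5000 milliseconds, via apply(Long)
+//   Timeout(5, TimeUnit.SECONDS)            // via apply(Long, TimeUnit)
+//   val t: Timeout = Duration(3, "seconds") // via the durationToTimeout implicit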
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
new file mode 100644
index 0000000000..c9bde81317
--- /dev/null
+++ b/src/library/scala/util/Try.scala
@@ -0,0 +1,165 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2008-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util
+
+
+
+import collection.Seq
+
+
+
+/**
+ * The `Try` type represents a computation that may either result in an exception
+ * or return a success value. It is analogous to the `Either` type.
+ */
+sealed abstract class Try[+T] {
+ /**
+ * Returns true if the `Try` is a `Failure`, false otherwise.
+ */
+ def isFailure: Boolean
+
+ /**
+ * Returns true if the `Try` is a `Success`, false otherwise.
+ */
+ def isSuccess: Boolean
+
+ /**
+ * Returns the value from this `Success` or the given argument if this is a `Failure`.
+ */
+ def getOrElse[U >: T](default: => U) = if (isSuccess) get else default
+
+ /**
+ * Returns the value from this `Success` or throws the exception if this is a `Failure`.
+ */
+ def get: T
+
+ /**
+ * Applies the given function `f` if this is a `Success`, otherwise does nothing.
+ */
+ def foreach[U](f: T => U): Unit
+
+ /**
+ * Returns the given function applied to the value from this `Success` or returns this if this is a `Failure`.
+ */
+ def flatMap[U](f: T => Try[U]): Try[U]
+
+ /**
+ * Maps the given function to the value from this `Success` or returns this if this is a `Failure`.
+ */
+ def map[U](f: T => U): Try[U]
+
+ def collect[U](pf: PartialFunction[T, U]): Try[U]
+
+ def exists(p: T => Boolean): Boolean
+
+ /**
+ * Converts this to a `Failure` if the predicate is not satisfied.
+ */
+ def filter(p: T => Boolean): Try[T]
+
+ /**
+ * Converts this to a `Failure` if the predicate is satisfied.
+ */
+ def filterNot(p: T => Boolean): Try[T] = filter(x => !p(x))
+
+ /**
+ * Applies `rescueException` to the exception if this is a `Failure`. This is like `flatMap` for the exception.
+ */
+ def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U]
+
+ /**
+ * Applies `rescueException` to the exception if this is a `Failure`. This is like `map` for the exception.
+ */
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U]
+
+ /**
+ * Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
+ */
+ def toOption = if (isSuccess) Some(get) else None
+
+ def toSeq = if (isSuccess) Seq(get) else Seq()
+
+ /**
+ * Returns the given function applied to the value from this `Success` or returns this if this is a `Failure`.
+ * Alias for `flatMap`.
+ */
+ def andThen[U](f: T => Try[U]): Try[U] = flatMap(f)
+
+ /**
+ * Transforms a nested `Try`, i.e., a `Try` of type `Try[Try[T]]`,
+ * into an un-nested `Try`, i.e., a `Try` of type `Try[T]`.
+ */
+ def flatten[U](implicit ev: T <:< Try[U]): Try[U]
+
+ def failed: Try[Throwable]
+}
+
+
+final case class Failure[+T](exception: Throwable) extends Try[T] {
+ def isFailure = true
+ def isSuccess = false
+ def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U] = {
+ try {
+ if (rescueException.isDefinedAt(exception)) rescueException(exception) else this
+ } catch {
+ case e2 => Failure(e2)
+ }
+ }
+ def get: T = throw exception
+ def flatMap[U](f: T => Try[U]): Try[U] = Failure[U](exception)
+ def flatten[U](implicit ev: T <:< Try[U]): Try[U] = Failure[U](exception)
+ def foreach[U](f: T => U): Unit = {}
+ def map[U](f: T => U): Try[U] = Failure[U](exception)
+ def collect[U](pf: PartialFunction[T, U]): Try[U] = Failure[U](exception)
+ def filter(p: T => Boolean): Try[T] = this
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
+ if (rescueException.isDefinedAt(exception)) {
+ Try(rescueException(exception))
+ } else {
+ this
+ }
+ def exists(p: T => Boolean): Boolean = false
+ def failed: Try[Throwable] = Success(exception)
+}
+
+
+final case class Success[+T](r: T) extends Try[T] {
+ def isFailure = false
+ def isSuccess = true
+ def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U] = Success(r)
+ def get = r
+ def flatMap[U](f: T => Try[U]): Try[U] =
+ try f(r)
+ catch {
+ case e => Failure(e)
+ }
+ def flatten[U](implicit ev: T <:< Try[U]): Try[U] = r
+ def foreach[U](f: T => U): Unit = f(r)
+ def map[U](f: T => U): Try[U] = Try[U](f(r))
+ def collect[U](pf: PartialFunction[T, U]): Try[U] =
+ if (pf isDefinedAt r) Success(pf(r))
+ else Failure[U](new NoSuchElementException("Partial function not defined at " + r))
+ def filter(p: T => Boolean): Try[T] =
+ if (p(r)) this
+ else Failure(new NoSuchElementException("Predicate does not hold for " + r))
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
+ def exists(p: T => Boolean): Boolean = p(r)
+ def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
+}
+
+
+object Try {
+
+ def apply[T](r: => T): Try[T] = {
+ try { Success(r) } catch {
+ case e => Failure(e)
+ }
+ }
+
+}
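+
+// Illustrative usage (not part of the original patch):
+//   Try(2 / 1)                  // Success(2)
+//   Try(2 / 0)                  // Failure(java.lang.ArithmeticException: / by zero)
+//   Try("5".toInt) map (_ + 1)  // Success(6)
+//   Try("x".toInt) getOrElse 0  // 0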
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index 4004a01ad9..9aaf0aeb54 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -487,7 +487,7 @@ trait Parsers {
}
/** Changes the error message produced by a parser.
- *
+ *
* This doesn't change the behavior of a parser on either
* success or failure, just on error. The semantics are
* slightly different than those obtained by doing `| error(msg)`,
@@ -794,7 +794,7 @@ trait Parsers {
*/
def chainl1[T, U](first: => Parser[T], p: => Parser[U], q: => Parser[(T, U) => T]): Parser[T]
= first ~ rep(q ~ p) ^^ {
- case x ~ xs => xs.foldLeft(x){(_, _) match {case (a, f ~ b) => f(a, b)}}
+ case x ~ xs => xs.foldLeft(x: T){case (a, f ~ b) => f(a, b)} // x's type annotation is needed to deal with changed type inference due to SI-5189
}
/** A parser generator that generalises the `rep1sep` generator so that `q`,
@@ -812,8 +812,7 @@ trait Parsers {
*/
def chainr1[T, U](p: => Parser[T], q: => Parser[(T, U) => U], combine: (T, U) => U, first: U): Parser[U]
= p ~ rep(q ~ p) ^^ {
- case x ~ xs => (new ~(combine, x) :: xs).
- foldRight(first){(_, _) match {case (f ~ a, b) => f(a, b)}}
+ case x ~ xs => (new ~(combine, x) :: xs).foldRight(first){case (f ~ a, b) => f(a, b)}
}
/** A parser generator for optional sub-phrases.
diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala
index 3e0b02a415..39d68dbc18 100644
--- a/src/manual/scala/tools/docutil/EmitManPage.scala
+++ b/src/manual/scala/tools/docutil/EmitManPage.scala
@@ -165,7 +165,7 @@ object EmitManPage {
def main(args: Array[String]) = args match{
case Array(classname) => emitManPage(classname)
- case Array(classname, file, _*) => emitManPage(classname, new java.io.FileOutputStream(file))
+ case Array(classname, file, _*) => emitManPage(classname, new java.io.FileOutputStream(file))
case _ => sys.exit(1)
}
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
new file mode 100644
index 0000000000..1cb09b433a
--- /dev/null
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -0,0 +1,27 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.partest
+
+import scala.tools.nsc._
+
+/** For testing compiler internals directly.
+ * Each source code string in "sources" will be compiled, and
+ * the check function will be called with the source code and the
+ * resulting CompilationUnit. The check implementation should
+ * perform whatever assertions it requires and fail (via assert
+ * or another exception) if they do not hold.
+ */
+abstract class CompilerTest extends DirectTest {
+ def check(source: String, unit: global.CompilationUnit): Unit
+
+ lazy val global: Global = newCompiler()
+ lazy val units = compilationUnits(global)(sources: _ *)
+
+ override def extraSettings = "-usejavacp -d " + testOutput.path
+
+ def sources: List[String] = List(code)
+ def show() = (sources, units).zipped foreach check
+}
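+
+// A minimal concrete test might look like this (illustrative sketch; the
+// `code` member comes from DirectTest and the names inside check are hypothetical):
+//   object Test extends CompilerTest {
+//     def code = "class C { def f = 42 }"
+//     def check(source: String, unit: global.CompilationUnit) {
+//       assert(unit.body exists (_.isDef), source)
+//     }
+//   }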
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index be8cac9147..07444f8d4b 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -35,13 +35,37 @@ abstract class DirectTest extends App {
s processArguments (allArgs, true)
s
}
- // compile the code, optionally first adding to the settings
- def compile(args: String*) = {
+ // new compiler
+ def newCompiler(args: String*): Global = {
val settings = newSettings((CommandLineParser tokenize extraSettings) ++ args.toList)
- val global = new Global(settings)
- new global.Run compileSources List(new BatchSourceFile("<partest>", code))
+ new Global(settings)
+ }
+ def newSources(sourceCodes: String*) = sourceCodes.toList.zipWithIndex map {
+ case (src, idx) => new BatchSourceFile("newSource" + (idx + 1), src)
+ }
+ def compileString(global: Global)(sourceCode: String): Boolean = {
+ withRun(global)(_ compileSources newSources(sourceCode))
!global.reporter.hasErrors
}
+ def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
+ val units = withRun(global) { run =>
+ run compileSources newSources(sourceCodes: _*)
+ run.units.toList
+ }
+ if (global.reporter.hasErrors) {
+ global.reporter.flush()
+ sys.error("Compilation failure.")
+ }
+ units
+ }
+
+ def withRun[T](global: Global)(f: global.Run => T): T = {
+ global.reporter.reset()
+ f(new global.Run)
+ }
+
+ // compile the code, optionally first adding to the settings
+ def compile(args: String*) = compileString(newCompiler(args: _*))(code)
/** Constructor/main body **/
try show()
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index a90a61a9aa..524dc06327 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -15,10 +15,8 @@ import scala.tools.nsc.io.{ Directory, Path => SPath }
import nsc.util.ClassPath
import util.PathResolver
import scala.tools.ant.sabbus.CompilationPathProperty
-
import java.io.File
import java.lang.reflect.Method
-
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.{Path, Reference, FileSet}
import org.apache.tools.ant.types.Commandline.Argument
@@ -309,6 +307,16 @@ class PartestTask extends Task with CompilationPathProperty {
val antRunner = new scala.tools.partest.nest.AntRunner
val antFileManager = antRunner.fileManager
+ // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
+ // when that bug is fixed, this paragraph of code can be safely removed
+ // we hack into the classloader that will become parent classloader for scalac
+ // this way we ensure that reflective macro lookup will pick correct Code.lift
+ val loader = getClass.getClassLoader.asInstanceOf[org.apache.tools.ant.AntClassLoader]
+ val path = new org.apache.tools.ant.types.Path(getProject())
+ val newClassPath = ClassPath.join(nest.PathSettings.srcCodeLib.toString, loader.getClasspath)
+ path.setPath(newClassPath)
+ loader.setClassPath(path)
+
antFileManager.showDiff = showDiff
antFileManager.showLog = showLog
antFileManager.failed = runFailed
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
index 6604bc551d..7aaa7bab00 100644
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ b/src/partest/scala/tools/partest/nest/CompileManager.scala
@@ -12,6 +12,7 @@ import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
import scala.tools.nsc.interactive.RangePositions
import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
import scala.tools.nsc.util.{ ClassPath, FakePos }
+import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
import scala.tools.util.PathResolver
import io.Path
import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter }
@@ -112,6 +113,7 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
try {
NestUI.verbose("compiling "+toCompile)
NestUI.verbose("with classpath: "+global.classPath.toString)
+ NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path"))
try new global.Run compile toCompile
catch {
case FatalError(msg) =>
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index 04f36ffa11..ac04c64c33 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -40,6 +40,15 @@ object PathSettings {
sys.error("No instrumented.jar found in %s".format(srcSpecLibDir))
}
+ // Directory <root>/test/files/codelib
+ lazy val srcCodeLibDir = Directory(srcDir / "codelib")
+
+ lazy val srcCodeLib: File = (
+ findJar(srcCodeLibDir, "code")
+ orElse findJar(Directory(testRoot / "files" / "codelib"), "code") // work with --srcpath pending
+ getOrElse sys.error("No code.jar found in %s".format(srcCodeLibDir))
+ )
+
// Directory <root>/build
lazy val buildDir: Directory = {
val bases = testRoot :: testRoot.parents
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 7c6dd0848f..5cde63dc81 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -53,7 +53,13 @@ class ReflectiveRunner {
Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile) map (x => io.File(x))
val sepUrls = files map (_.toURL)
- val sepLoader = new URLClassLoader(sepUrls, null)
+ var sepLoader = new URLClassLoader(sepUrls, null)
+
+ // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
+ // when that bug is fixed, this paragraph of code can be safely removed
+ // we hack into the classloader that will become parent classloader for scalac
+ // this way we ensure that reflective macro lookup will pick correct Code.lift
+ sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: files) map (_.toURL), null)
if (isPartestDebug)
println("Loading classes from:\n" + sepUrls.mkString("\n"))
diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala
index 3e5fe35f9e..fc5792e886 100644
--- a/src/partest/scala/tools/partest/nest/TestFile.scala
+++ b/src/partest/scala/tools/partest/nest/TestFile.scala
@@ -35,6 +35,10 @@ abstract class TestFile(val kind: String) extends TestFileCommon {
if (setOutDir)
settings.outputDirs setSingleOutput setOutDirTo.path
+ // adding code.jar to the classpath (to provide Code.lift services for reification tests)
+ settings.classpath prepend PathSettings.srcCodeLib.toString
+ if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcCodeLib.toString + ";" + propOrElse("java.class.path", ""))
+
// have to catch bad flags somewhere
(flags forall (f => settings.processArgumentString(f)._1)) && {
settings.classpath append fileManager.CLASSPATH
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index 952d99c318..3f2cb16082 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -520,7 +520,9 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
runTestCommon(file, expectFailure = false)((logFile, outDir) => {
val dir = file.getParentFile
- execTest(outDir, logFile) && diffCheck(compareOutput(dir, logFile))
+ // adding code.jar to the classpath (to provide Code.lift services for reification tests)
+ execTest(outDir, logFile, PathSettings.srcCodeLib.toString) &&
+ diffCheck(compareOutput(dir, logFile))
})
// Apache Ant 1.6 or newer
diff --git a/src/partest/scala/tools/partest/utils/CodeTest.scala b/src/partest/scala/tools/partest/utils/CodeTest.scala
deleted file mode 100644
index c236d89bbd..0000000000
--- a/src/partest/scala/tools/partest/utils/CodeTest.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.partest
-package utils
-
-import scala.reflect.Code
-import reflect.runtime.Mirror.ToolBox
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.Settings
-
-/** Runner for testing code tree liftingg
- */
-object CodeTest {
- def static[T](code: () => T, args: Array[String] = Array()) = {
- println("static: "+code())
- }
-
- def apply[T](code: Code[T], args: Array[String] = Array()) = {
- println("testing: "+code.tree)
- println("type is: "+code.manifest.tpe)
- val isNullary = code.manifest.tpe.typeSymbol == scala.reflect.mirror.definitions.FunctionClass(0)
- val reporter = new ConsoleReporter(new Settings)
- val toolbox = new ToolBox(reporter, args mkString " ")
- val ttree = toolbox.typeCheck(code.tree, code.manifest.tpe)
- println("result = " + toolbox.showAttributed(ttree, printTypes = true, printIds = false))
- var evaluated = toolbox.runExpr(ttree)
- if (evaluated != null && isNullary) {
- val applyMeth = evaluated.getClass.getMethod("apply")
- evaluated = applyMeth.invoke(evaluated)
- }
- println("evaluated = "+evaluated)
- evaluated
- }
-}
diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala
index 72b3824157..2cbeaa945f 100644
--- a/src/scalap/scala/tools/scalap/Classfiles.scala
+++ b/src/scalap/scala/tools/scalap/Classfiles.scala
@@ -41,31 +41,5 @@ object Classfiles {
CONSTANT_INTFMETHODREF -> "InterfaceMethod",
CONSTANT_NAMEANDTYPE -> "NameAndType"
)
-
- final val BAD_ATTR = 0x00000
- final val SOURCEFILE_ATTR = 0x00001
- final val SYNTHETIC_ATTR = 0x00002
- final val DEPRECATED_ATTR = 0x00004
- final val CODE_ATTR = 0x00008
- final val EXCEPTIONS_ATTR = 0x00010
- final val CONSTANT_VALUE_ATTR = 0x00020
- final val LINE_NUM_TABLE_ATTR = 0x00040
- final val LOCAL_VAR_TABLE_ATTR = 0x00080
- final val INNERCLASSES_ATTR = 0x08000
- final val META_ATTR = 0x10000
- final val SCALA_ATTR = 0x20000
-
- final val SOURCEFILE_N = "SourceFile"
- final val SYNTHETIC_N = "Synthetic"
- final val DEPRECATED_N = "Deprecated"
- final val CODE_N = "Code"
- final val EXCEPTIONS_N = "Exceptions"
- final val CONSTANT_VALUE_N = "ConstantValue"
- final val LINE_NUM_TABLE_N = "LineNumberTable"
- final val LOCAL_VAR_TABLE_N = "LocalVariableTable"
- final val INNERCLASSES_N = "InnerClasses"
- final val META_N = "JacoMeta"
- final val SCALA_N = "ScalaSignature"
- final val CONSTR_N = "<init>"
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
index 43f9c20b1d..70926208b3 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
@@ -130,7 +130,7 @@ trait StateRules {
def rep(in : S, t : T) : Result[S, T, X] = {
if (finished(t)) Success(in, t)
else rule(in) match {
- case Success(out, f) => rep(out, f(t))
+ case Success(out, f) => rep(out, f(t)) // SI-5189 f.asInstanceOf[T => T]
case Failure => Failure
case Error(x) => Error(x)
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
index 84f28af7ce..1a4b3456b8 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
@@ -68,7 +68,7 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
val chunk: Array[Byte] = new Array[Byte](length)
System.arraycopy(bytes, pos, chunk, 0, length)
val str = new String(io.Codec.fromUTF8(bytes, pos, length))
-
+
StringBytesPair(str, chunk)
}
diff --git a/test/benchmarking/ParCtrie-bfs.scala b/test/benchmarking/ParCtrie-bfs.scala
new file mode 100644
index 0000000000..59149fff8c
--- /dev/null
+++ b/test/benchmarking/ParCtrie-bfs.scala
@@ -0,0 +1,73 @@
+
+
+
+
+
+import collection.parallel.mutable.ParCtrie
+
+
+object Bfs extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+
+ type Node = (Int, Int)
+ type Parent = (Int, Int)
+
+ def up(n: Node) = (n._1, n._2 - 1)
+ def down(n: Node) = (n._1, n._2 + 1)
+ def left(n: Node) = (n._1 - 1, n._2)
+ def right(n: Node) = (n._1 + 1, n._2)
+
+ // create a map and a target
+ val target = (length / 2, length / 2)
+ val map = Array.tabulate(length, length)((x, y) => (x % 3) != 0 || (y % 3) != 0 || (x, y) == target)
+ def onMap(n: Node) = n._1 >= 0 && n._1 < length && n._2 >= 0 && n._2 < length
+
+ // open and closed lists
+ val open = ParCtrie[Node, Parent]()
+ val closed = ParCtrie[Node, Parent]()
+
+ collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+
+ override def setUp() {
+ open.clear()
+ closed.clear()
+
+ // a couple of starting positions
+ open((0, 0)) = null
+ open((length - 1, length - 1)) = null
+ open((0, length - 1)) = null
+ open((length - 1, 0)) = null
+ }
+
+ def run() = {
+ // greedy bfs path search
+ while (open.nonEmpty && !open.contains(target)) {
+ for ((node, parent) <- open) {
+ def expand(next: Node) {
+ if (onMap(next) && map(next._1)(next._2) && !closed.contains(next) && !open.contains(next)) {
+ open(next) = node
+ }
+ }
+ expand(up(node))
+ expand(down(node))
+ expand(left(node))
+ expand(right(node))
+ closed(node) = parent
+ open.remove(node)
+ }
+ }
+ }
+
+ override def tearDown() {
+ // print path
+ var pathnode = open(target)
+ while (closed.contains(pathnode)) {
+ print(pathnode + "->")
+ pathnode = closed(pathnode)
+ }
+ println()
+ }
+
+}
+
diff --git a/test/benchmarking/ParCtrie-map.scala b/test/benchmarking/ParCtrie-map.scala
new file mode 100644
index 0000000000..c8de99f33e
--- /dev/null
+++ b/test/benchmarking/ParCtrie-map.scala
@@ -0,0 +1,21 @@
+
+
+
+import collection.parallel.mutable.ParCtrie
+
+
+
+object Map extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+ val parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+
+ collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+
+ def run = {
+ parctrie map {
+ kv => kv
+ }
+ }
+}
+
diff --git a/test/benchmarking/ParCtrie-nums.scala b/test/benchmarking/ParCtrie-nums.scala
new file mode 100644
index 0000000000..76d1966d1f
--- /dev/null
+++ b/test/benchmarking/ParCtrie-nums.scala
@@ -0,0 +1,39 @@
+
+
+
+
+
+import collection.parallel.mutable.ParCtrie
+
+
+case class Entry(num: Double) {
+ var sqrt = num
+}
+
+
+object Nums extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+ var entries: Seq[Entry] = null
+ var results: ParCtrie[Double, Entry] = null
+
+ collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+
+ override def setUp() {
+ entries = (1 until length) map { num => Entry(num.toDouble) }
+ results = ParCtrie()
+ for (e <- entries) results += ((e.num, e))
+ }
+
+ def run() = {
+ while (results.nonEmpty) {
+ for ((num, e) <- results) {
+ val nsqrt = 0.5 * (e.sqrt + e.num / e.sqrt)
+ if (math.abs(nsqrt - e.sqrt) < 0.01) {
+ results.remove(num)
+ } else e.sqrt = nsqrt
+ }
+ }
+ }
+}
+
diff --git a/test/benchmarking/ParCtrie-size.scala b/test/benchmarking/ParCtrie-size.scala
new file mode 100644
index 0000000000..5a6191fb62
--- /dev/null
+++ b/test/benchmarking/ParCtrie-size.scala
@@ -0,0 +1,34 @@
+
+
+
+
+import collection.parallel.mutable.ParCtrie
+
+
+
+object Size extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+ var parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+
+ collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+
+ def run = {
+ parctrie.size
+ }
+
+ var iteration = 0
+
+ override def tearDown() {
+ iteration += 1
+ if (iteration % 4 == 0) parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+ }
+
+}
+
+
+
+
+
+
+
diff --git a/test/benchmarking/TreeSetInsert.scala b/test/benchmarking/TreeSetInsert.scala
index 9ede8aedc5..23444aa305 100644
--- a/test/benchmarking/TreeSetInsert.scala
+++ b/test/benchmarking/TreeSetInsert.scala
@@ -33,6 +33,7 @@ object JavaUtilTS extends testing.Benchmark {
}
}
+
object MutableTS extends testing.Benchmark {
val length = sys.props("length").toInt
var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
@@ -50,6 +51,7 @@ object MutableTS extends testing.Benchmark {
}
}
+
object ImmutableTS extends testing.Benchmark {
val length = sys.props("length").toInt
var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
diff --git a/test/files/buildmanager/t2652/A.scala b/test/disabled/buildmanager/t2652/A.scala
index a62506e890..a62506e890 100644
--- a/test/files/buildmanager/t2652/A.scala
+++ b/test/disabled/buildmanager/t2652/A.scala
diff --git a/test/files/buildmanager/t2652/B.scala b/test/disabled/buildmanager/t2652/B.scala
index 86d08f0d3d..86d08f0d3d 100644
--- a/test/files/buildmanager/t2652/B.scala
+++ b/test/disabled/buildmanager/t2652/B.scala
diff --git a/test/files/buildmanager/t2652/t2652.changes/A2.scala b/test/disabled/buildmanager/t2652/t2652.changes/A2.scala
index 29135c0e94..29135c0e94 100644
--- a/test/files/buildmanager/t2652/t2652.changes/A2.scala
+++ b/test/disabled/buildmanager/t2652/t2652.changes/A2.scala
diff --git a/test/files/buildmanager/t2652/t2652.check b/test/disabled/buildmanager/t2652/t2652.check
index 071281c6ff..071281c6ff 100644
--- a/test/files/buildmanager/t2652/t2652.check
+++ b/test/disabled/buildmanager/t2652/t2652.check
diff --git a/test/files/buildmanager/t2652/t2652.test b/test/disabled/buildmanager/t2652/t2652.test
index 6f3bd03361..6f3bd03361 100644
--- a/test/files/buildmanager/t2652/t2652.test
+++ b/test/disabled/buildmanager/t2652/t2652.test
diff --git a/test/disabled/jvm/scala-concurrent-tck-akka.scala b/test/disabled/jvm/scala-concurrent-tck-akka.scala
new file mode 100644
index 0000000000..dfd906e59e
--- /dev/null
+++ b/test/disabled/jvm/scala-concurrent-tck-akka.scala
@@ -0,0 +1,391 @@
+
+
+import akka.dispatch.{
+ Future => future,
+ Promise => promise
+}
+import akka.dispatch.Await.{result => await}
+
+// Duration required for await
+import akka.util.Duration
+import java.util.concurrent.TimeUnit
+import TimeUnit._
+
+import scala.concurrent.{
+ TimeoutException,
+ SyncVar,
+ ExecutionException
+}
+//import scala.concurrent.future
+//import scala.concurrent.promise
+//import scala.concurrent.await
+
+
+
+trait TestBase {
+
+ implicit val disp = akka.actor.ActorSystem().dispatcher
+
+ def once(body: (() => Unit) => Unit) {
+ val sv = new SyncVar[Boolean]
+ body(() => sv put true)
+ sv.take()
+ }
+
+}
+
+
+trait FutureCallbacks extends TestBase {
+
+ def testOnSuccess(): Unit = once {
+ done =>
+ var x = 0
+ val f = future {
+ x = 1
+ }
+ f onSuccess { case any =>
+ done()
+ assert(x == 1)
+ }
+ }
+
+ def testOnSuccessWhenCompleted(): Unit = once {
+ done =>
+ var x = 0
+ val f = future {
+ x = 1
+ }
+ f onSuccess { case any =>
+ assert(x == 1)
+ x = 2
+ f onSuccess { case any =>
+ assert(x == 2)
+ done()
+ }
+ }
+ }
+
+ def testOnSuccessWhenFailed(): Unit = once {
+ done =>
+ val f = future[Unit] {
+ done()
+ throw new Exception
+ }
+ f onSuccess { case any =>
+ assert(false)
+ }
+ }
+
+ def testOnFailure(): Unit = once {
+ done =>
+ var x = 0
+ val f = future[Unit] {
+ x = 1
+ throw new Exception
+ }
+ f onSuccess { case any =>
+ done()
+ assert(false)
+ }
+ f onFailure {
+ case _ =>
+ done()
+ assert(x == 1)
+ }
+ }
+
+ def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once {
+ done =>
+ val f = future[Unit] {
+ throw cause
+ }
+ f onSuccess { case any =>
+ done()
+ assert(false)
+ }
+ f onFailure {
+ case e: ExecutionException if (e.getCause == cause) =>
+ done()
+ case _ =>
+ done()
+ assert(false)
+ }
+ }
+
+ def testOnFailureWhenTimeoutException(): Unit = once {
+ done =>
+ val f = future[Unit] {
+ throw new TimeoutException()
+ }
+ f onSuccess { case any =>
+ done()
+ assert(false)
+ }
+ f onFailure {
+ case e: TimeoutException =>
+ done()
+ case other =>
+ done()
+ assert(false)
+ }
+ }
+
+ testOnSuccess()
+ testOnSuccessWhenCompleted()
+ testOnSuccessWhenFailed()
+ testOnFailure()
+// testOnFailureWhenSpecialThrowable(5, new Error)
+// testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
+// testOnFailureWhenSpecialThrowable(7, new InterruptedException)
+// testOnFailureWhenTimeoutException()
+
+}
+
+
+trait FutureCombinators extends TestBase {
+
+ // map: stub
+ def testMapSuccess(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testMapFailure(): Unit = once {
+ done =>
+ done()
+ }
+
+ // flatMap: stub
+ def testFlatMapSuccess(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testFlatMapFailure(): Unit = once {
+ done =>
+ done()
+ }
+
+ // filter: stub
+ def testFilterSuccess(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testFilterFailure(): Unit = once {
+ done =>
+ done()
+ }
+
+ // foreach: stub
+ def testForeachSuccess(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testForeachFailure(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testRecoverSuccess(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ } recover {
+ case re: RuntimeException =>
+ "recovered"
+ } onSuccess { case x =>
+ done()
+ assert(x == "recovered")
+ } onFailure { case any =>
+ done()
+ assert(false)
+ }
+ }
+
+ def testRecoverFailure(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ } recover {
+ case te: TimeoutException => "timeout"
+ } onSuccess { case x =>
+ done()
+ assert(false)
+ } onFailure { case any =>
+ done()
+ assert(any == cause)
+ }
+ }
+
+ testMapSuccess()
+ testMapFailure()
+ testFlatMapSuccess()
+ testFlatMapFailure()
+ testFilterSuccess()
+ testFilterFailure()
+ testForeachSuccess()
+ testForeachFailure()
+ testRecoverSuccess()
+ testRecoverFailure()
+
+}
+
+/*
+trait FutureProjections extends TestBase {
+
+ def testFailedFailureOnComplete(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ }
+ f.failed onComplete {
+ case Right(t) =>
+ assert(t == cause)
+ done()
+ case Left(t) =>
+ assert(false)
+ }
+ }
+
+ def testFailedFailureOnSuccess(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ }
+ f.failed onSuccess {
+ t =>
+ assert(t == cause)
+ done()
+ }
+ }
+
+ def testFailedSuccessOnComplete(): Unit = once {
+ done =>
+ val f = future { 0 }
+ f.failed onComplete {
+ case Right(t) =>
+ assert(false)
+ case Left(t) =>
+ assert(t.isInstanceOf[NoSuchElementException])
+ done()
+ }
+ }
+
+ def testFailedSuccessOnFailure(): Unit = once {
+ done =>
+ val f = future { 0 }
+ f.failed onFailure {
+ case nsee: NoSuchElementException =>
+ done()
+ }
+ }
+
+ def testFailedFailureAwait(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ }
+ assert(await(0, f.failed) == cause)
+ done()
+ }
+
+ def testFailedSuccessAwait(): Unit = once {
+ done =>
+ val f = future { 0 }
+ try {
+ println(await(0, f.failed))
+ assert(false)
+ } catch {
+ case nsee: NoSuchElementException => done()
+ }
+ }
+
+ testFailedFailureOnComplete()
+ testFailedFailureOnSuccess()
+ testFailedSuccessOnComplete()
+ testFailedSuccessOnFailure()
+ testFailedFailureAwait()
+ //testFailedSuccessAwait()
+
+}
+*/
+
+trait Blocking extends TestBase {
+
+ def testAwaitSuccess(): Unit = once {
+ done =>
+ val f = future { 0 }
+ await(f, Duration(500, "ms"))
+ done()
+ }
+
+ def testAwaitFailure(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ }
+ try {
+ await(f, Duration(500, "ms"))
+ assert(false)
+ } catch {
+ case t =>
+ assert(t == cause)
+ done()
+ }
+ }
+
+ testAwaitSuccess()
+ testAwaitFailure()
+
+}
+
+/*
+trait Promises extends TestBase {
+
+ def testSuccess(): Unit = once {
+ done =>
+ val p = promise[Int]()
+ val f = p.future
+
+ f.onSuccess { x =>
+ done()
+ assert(x == 5)
+ } onFailure { case any =>
+ done()
+ assert(false)
+ }
+
+ p.success(5)
+ }
+
+ testSuccess()
+
+}
+*/
+
+trait Exceptions extends TestBase {
+
+}
+
+
+object Test
+extends App
+with FutureCallbacks
+with FutureCombinators
+/*with FutureProjections*/
+/*with Promises*/
+with Blocking
+with Exceptions
+{
+ System.exit(0)
+}
+
+
diff --git a/test/files/codelib/code.jar.desired.sha1 b/test/files/codelib/code.jar.desired.sha1
new file mode 100644
index 0000000000..dbf19cafd7
--- /dev/null
+++ b/test/files/codelib/code.jar.desired.sha1
@@ -0,0 +1 @@
+e25f1daf9010b9dc6038ae7069fc9d0f7d48a53b ?code.jar
diff --git a/test/files/continuations-neg/t5445.check b/test/files/continuations-neg/t5445.check
new file mode 100644
index 0000000000..eb2943b6a6
--- /dev/null
+++ b/test/files/continuations-neg/t5445.check
@@ -0,0 +1,4 @@
+t5445.scala:4: error: cps annotations not allowed on by-value parameters or value definitions
+ def foo(block: Unit @suspendable ): Unit @suspendable = {}
+ ^
+one error found
diff --git a/test/files/continuations-neg/t5445.scala b/test/files/continuations-neg/t5445.scala
new file mode 100644
index 0000000000..cb6f8f686d
--- /dev/null
+++ b/test/files/continuations-neg/t5445.scala
@@ -0,0 +1,5 @@
+import scala.util.continuations._
+
+object Test {
+ def foo(block: Unit @suspendable ): Unit @suspendable = {}
+}
diff --git a/test/files/continuations-run/t5506.check b/test/files/continuations-run/t5506.check
new file mode 100644
index 0000000000..38b76c63f1
--- /dev/null
+++ b/test/files/continuations-run/t5506.check
@@ -0,0 +1,7 @@
+List(1, 2, 3)
+List(1, 2, 3)
+List(1, 2, 3)
+List(1, 2, 3)
+List(1, 2, 3)
+List(1, 2, 3)
+List(1, 2, 3)
diff --git a/test/files/continuations-run/t5506.scala b/test/files/continuations-run/t5506.scala
new file mode 100644
index 0000000000..2b5c1118f7
--- /dev/null
+++ b/test/files/continuations-run/t5506.scala
@@ -0,0 +1,58 @@
+import scala.util.continuations._
+
+object Test {
+
+def g: List[Int] @suspendable = List(1,2,3)
+
+def fp10: List[Int] @suspendable = {
+g.map(x => x)
+}
+
+def fp11: List[Int] @suspendable = {
+val z = g.map(x => x)
+z
+}
+
+
+def fp12: List[Int] @suspendable = {
+val z = List(1,2,3)
+z.map(x => x)
+}
+
+
+
+def fp20: List[Int] @suspendable = {
+g.map[Int,List[Int]](x => x)
+}
+
+
+def fp21: List[Int] @suspendable = {
+val z = g.map[Int,List[Int]](x => x)
+z
+}
+
+def fp22: List[Int] @suspendable = {
+val z = g.map[Int,List[Int]](x => x)(List.canBuildFrom[Int])
+z
+}
+
+def fp23: List[Int] @suspendable = {
+val z = g.map(x => x)(List.canBuildFrom[Int])
+z
+}
+
+
+def main(args: Array[String]) = {
+ reset {
+ println(fp10)
+ println(fp11)
+ println(fp12)
+
+ println(fp20)
+ println(fp21)
+ println(fp22)
+ println(fp23)
+ }
+}
+
+}
diff --git a/test/files/continuations-run/t5538.check b/test/files/continuations-run/t5538.check
new file mode 100644
index 0000000000..457721d5e0
--- /dev/null
+++ b/test/files/continuations-run/t5538.check
@@ -0,0 +1 @@
+Future(Future(Future(Future(Future(List(1, 2, 3, 4, 5))))))
diff --git a/test/files/continuations-run/t5538.scala b/test/files/continuations-run/t5538.scala
new file mode 100644
index 0000000000..42f8163caf
--- /dev/null
+++ b/test/files/continuations-run/t5538.scala
@@ -0,0 +1,50 @@
+import scala.util.continuations._
+import scala.collection.generic.CanBuildFrom
+
+object Test {
+
+ class ExecutionContext
+
+ implicit def defaultExecutionContext = new ExecutionContext
+
+ case class Future[+T](x:T) {
+ final def map[A](f: T => A): Future[A] = new Future[A](f(x))
+ final def flatMap[A](f: T => Future[A]): Future[A] = f(x)
+ }
+
+ class PromiseStream[A] {
+ override def toString = xs.toString
+
+ var xs: List[A] = Nil
+
+ final def +=(elem: A): this.type = { xs :+= elem; this }
+
+ final def ++=(elem: Traversable[A]): this.type = { xs ++= elem; this }
+
+ final def <<(elem: Future[A]): PromiseStream[A] @cps[Future[Any]] =
+ shift { cont: (PromiseStream[A] => Future[Any]) => elem map (a => cont(this += a)) }
+
+ final def <<(elem1: Future[A], elem2: Future[A], elems: Future[A]*): PromiseStream[A] @cps[Future[Any]] =
+ shift { cont: (PromiseStream[A] => Future[Any]) => Future.flow(this << elem1 << elem2 <<< Future.sequence(elems.toSeq)) map cont }
+
+ final def <<<(elems: Traversable[A]): PromiseStream[A] @cps[Future[Any]] =
+ shift { cont: (PromiseStream[A] => Future[Any]) => cont(this ++= elems) }
+
+ final def <<<(elems: Future[Traversable[A]]): PromiseStream[A] @cps[Future[Any]] =
+ shift { cont: (PromiseStream[A] => Future[Any]) => elems map (as => cont(this ++= as)) }
+ }
+
+ object Future {
+
+ def sequence[A, M[_] <: Traversable[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] =
+ new Future(in.asInstanceOf[Traversable[Future[A]]].map((f:Future[A])=>f.x)(cbf.asInstanceOf[CanBuildFrom[Traversable[Future[A]], A, M[A]]]))
+
+ def flow[A](body: => A @cps[Future[Any]])(implicit executor: ExecutionContext): Future[A] = reset(Future(body)).asInstanceOf[Future[A]]
+
+ }
+
+ def main(args: Array[String]) = {
+ val p = new PromiseStream[Int]
+ println(Future.flow(p << (Future(1), Future(2), Future(3), Future(4), Future(5))))
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/concurrent-future.check b/test/files/jvm/concurrent-future.check
new file mode 100644
index 0000000000..c55e824818
--- /dev/null
+++ b/test/files/jvm/concurrent-future.check
@@ -0,0 +1,16 @@
+test1: hai world
+test1: kthxbye
+test2: hai world
+test2: awsum thx
+test2: kthxbye
+test3: hai world
+test4: hai world
+test4: kthxbye
+test5: hai world
+test5: kthxbye
+test6: hai world
+test6: kthxbye
+test7: hai world
+test7: kthxbye
+test8: hai world
+test8: im in yr loop
diff --git a/test/files/jvm/concurrent-future.scala b/test/files/jvm/concurrent-future.scala
new file mode 100644
index 0000000000..b44d054219
--- /dev/null
+++ b/test/files/jvm/concurrent-future.scala
@@ -0,0 +1,122 @@
+
+
+
+import scala.concurrent._
+
+
+
+object Test extends App {
+
+ def once(body: (() => Unit) => Unit) {
+ val sv = new SyncVar[Boolean]
+ body(() => sv put true)
+ sv.take()
+ }
+
+ def output(num: Int, msg: String) {
+ println("test" + num + ": " + msg)
+ }
+
+ def testOnSuccess(): Unit = once {
+ done =>
+ val f = future {
+ output(1, "hai world")
+ }
+ f onSuccess { case _ =>
+ output(1, "kthxbye")
+ done()
+ }
+ }
+
+ def testOnSuccessWhenCompleted(): Unit = once {
+ done =>
+ val f = future {
+ output(2, "hai world")
+ }
+ f onSuccess { case _ =>
+ output(2, "awsum thx")
+ f onSuccess { case _ =>
+ output(2, "kthxbye")
+ done()
+ }
+ }
+ }
+
+ def testOnSuccessWhenFailed(): Unit = once {
+ done =>
+ val f = future[Unit] {
+ output(3, "hai world")
+ done()
+ throw new Exception
+ }
+ f onSuccess { case _ =>
+ output(3, "onoes")
+ }
+ }
+
+ def testOnFailure(): Unit = once {
+ done =>
+ val f = future[Unit] {
+ output(4, "hai world")
+ throw new Exception
+ }
+ f onSuccess { case _ =>
+ output(4, "onoes")
+ done()
+ }
+ f onFailure { case _ =>
+ output(4, "kthxbye")
+ done()
+ }
+ }
+
+ def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once {
+ done =>
+ val f = future[Unit] {
+ output(num, "hai world")
+ throw cause
+ }
+ f onSuccess { case _ =>
+ output(num, "onoes")
+ done()
+ }
+ f onFailure {
+ case e: ExecutionException if (e.getCause == cause) =>
+ output(num, "kthxbye")
+ done()
+ case _ =>
+ output(num, "onoes")
+ done()
+ }
+ }
+
+ def testOnFailureWhenFutureTimeoutException(): Unit = once {
+ done =>
+ val f = future[Unit] {
+ output(8, "hai world")
+ throw new FutureTimeoutException(null)
+ }
+ f onSuccess { case _ =>
+ output(8, "onoes")
+ done()
+ }
+ f onFailure {
+ case e: FutureTimeoutException =>
+ output(8, "im in yr loop")
+ done()
+ case other =>
+ output(8, "onoes: " + other)
+ done()
+ }
+ }
+
+ testOnSuccess()
+ testOnSuccessWhenCompleted()
+ testOnSuccessWhenFailed()
+ testOnFailure()
+ testOnFailureWhenSpecialThrowable(5, new Error)
+ testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
+ testOnFailureWhenSpecialThrowable(7, new InterruptedException)
+ testOnFailureWhenFutureTimeoutException()
+
+}
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
new file mode 100644
index 0000000000..ba7dffbcb0
--- /dev/null
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -0,0 +1,413 @@
+
+
+
+import scala.concurrent.{
+ Future,
+ Promise,
+ TimeoutException,
+ SyncVar,
+ ExecutionException
+}
+import scala.concurrent.future
+import scala.concurrent.promise
+import scala.concurrent.blocking
+import scala.util.{ Try, Success, Failure }
+
+import scala.util.Duration
+
+
+trait TestBase {
+
+ def once(body: (() => Unit) => Unit) {
+ val sv = new SyncVar[Boolean]
+ body(() => sv put true)
+ sv.take()
+ }
+
+ // def assert(cond: => Boolean) {
+ // try {
+ // Predef.assert(cond)
+ // } catch {
+ // case e => e.printStackTrace()
+ // }
+ // }
+
+}
+
+
+trait FutureCallbacks extends TestBase {
+
+ def testOnSuccess(): Unit = once {
+ done =>
+ var x = 0
+ val f = future {
+ x = 1
+ }
+ f onSuccess {
+ case _ =>
+ done()
+ assert(x == 1)
+ }
+ }
+
+ def testOnSuccessWhenCompleted(): Unit = once {
+ done =>
+ var x = 0
+ val f = future {
+ x = 1
+ }
+ f onSuccess {
+ case _ =>
+ assert(x == 1)
+ x = 2
+ f onSuccess {
+ case _ =>
+ assert(x == 2)
+ done()
+ }
+ }
+ }
+
+ def testOnSuccessWhenFailed(): Unit = once {
+ done =>
+ val f = future[Unit] {
+ done()
+ throw new Exception
+ }
+ f onSuccess {
+ case _ => assert(false)
+ }
+ }
+
+ def testOnFailure(): Unit = once {
+ done =>
+ var x = 0
+ val f = future[Unit] {
+ x = 1
+ throw new Exception
+ }
+ f onSuccess {
+ case _ =>
+ done()
+ assert(false)
+ }
+ f onFailure {
+ case _ =>
+ done()
+ assert(x == 1)
+ }
+ }
+
+ def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once {
+ done =>
+ val f = future[Unit] {
+ throw cause
+ }
+ f onSuccess {
+ case _ =>
+ done()
+ assert(false)
+ }
+ f onFailure {
+ case e: ExecutionException if (e.getCause == cause) =>
+ done()
+ case _ =>
+ done()
+ assert(false)
+ }
+ }
+
+ def testOnFailureWhenTimeoutException(): Unit = once {
+ done =>
+ val f = future[Unit] {
+ throw new TimeoutException()
+ }
+ f onSuccess {
+ case _ =>
+ done()
+ assert(false)
+ }
+ f onFailure {
+ case e: TimeoutException =>
+ done()
+ case other =>
+ done()
+ assert(false)
+ }
+ }
+
+ testOnSuccess()
+ testOnSuccessWhenCompleted()
+ testOnSuccessWhenFailed()
+ testOnFailure()
+ testOnFailureWhenSpecialThrowable(5, new Error)
+ testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
+ testOnFailureWhenSpecialThrowable(7, new InterruptedException)
+ testOnFailureWhenTimeoutException()
+
+}
+
+
+trait FutureCombinators extends TestBase {
+
+ // map: stub
+ def testMapSuccess(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testMapFailure(): Unit = once {
+ done =>
+ done()
+ }
+
+ // flatMap: stub
+ def testFlatMapSuccess(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testFlatMapFailure(): Unit = once {
+ done =>
+ done()
+ }
+
+ // filter: stub
+ def testFilterSuccess(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testFilterFailure(): Unit = once {
+ done =>
+ done()
+ }
+
+ // collect: stub
+ def testCollectSuccess(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testCollectFailure(): Unit = once {
+ done =>
+ done()
+ }
+
+ // foreach: stub
+ def testForeachSuccess(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testForeachFailure(): Unit = once {
+ done =>
+ done()
+ }
+
+ def testRecoverSuccess(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ } recover {
+ case re: RuntimeException =>
+ "recovered"
+ } onSuccess {
+ case x =>
+ done()
+ assert(x == "recovered")
+ } onFailure { case any =>
+ done()
+ assert(false)
+ }
+ }
+
+ def testRecoverFailure(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ } recover {
+ case te: TimeoutException => "timeout"
+ } onSuccess {
+ case x =>
+ done()
+ assert(false)
+ } onFailure { case any =>
+ done()
+ assert(any == cause)
+ }
+ }
+
+ testMapSuccess()
+ testMapFailure()
+ testFlatMapSuccess()
+ testFlatMapFailure()
+ testFilterSuccess()
+ testFilterFailure()
+ testCollectSuccess()
+ testCollectFailure()
+ testForeachSuccess()
+ testForeachFailure()
+ testRecoverSuccess()
+ testRecoverFailure()
+
+}
+
+
+trait FutureProjections extends TestBase {
+
+ def testFailedFailureOnComplete(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ }
+ f.failed onComplete {
+ case Success(t) =>
+ assert(t == cause)
+ done()
+ case Failure(t) =>
+ assert(false)
+ }
+ }
+
+ def testFailedFailureOnSuccess(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ }
+ f.failed onSuccess {
+ case t =>
+ assert(t == cause)
+ done()
+ }
+ }
+
+ def testFailedSuccessOnComplete(): Unit = once {
+ done =>
+ val f = future { 0 }
+ f.failed onComplete {
+ case Success(t) =>
+ assert(false)
+ case Failure(t) =>
+ assert(t.isInstanceOf[NoSuchElementException])
+ done()
+ }
+ }
+
+ def testFailedSuccessOnFailure(): Unit = once {
+ done =>
+ val f = future { 0 }
+ f.failed onFailure {
+ case nsee: NoSuchElementException =>
+ done()
+ }
+ }
+
+ def testFailedFailureAwait(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ }
+ assert(blocking(f.failed, Duration(500, "ms")) == cause)
+ done()
+ }
+
+ def testFailedSuccessAwait(): Unit = once {
+ done =>
+ val f = future { 0 }
+ try {
+ blocking(f.failed, Duration(500, "ms"))
+ assert(false)
+ } catch {
+ case nsee: NoSuchElementException => done()
+ }
+ }
+
+ testFailedFailureOnComplete()
+ testFailedFailureOnSuccess()
+ testFailedSuccessOnComplete()
+ testFailedSuccessOnFailure()
+ testFailedFailureAwait()
+ testFailedSuccessAwait()
+
+}
+
+
+trait Blocking extends TestBase {
+
+ def testAwaitSuccess(): Unit = once {
+ done =>
+ val f = future { 0 }
+ blocking(f, Duration(500, "ms"))
+ done()
+ }
+
+ def testAwaitFailure(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ }
+ try {
+ blocking(f, Duration(500, "ms"))
+ assert(false)
+ } catch {
+ case t =>
+ assert(t == cause)
+ done()
+ }
+ }
+
+ testAwaitSuccess()
+ testAwaitFailure()
+
+}
+
+
+trait Promises extends TestBase {
+
+ def testSuccess(): Unit = once {
+ done =>
+ val p = promise[Int]()
+ val f = p.future
+
+ f.onSuccess {
+ case x =>
+ done()
+ assert(x == 5)
+ } onFailure {
+ case any =>
+ done()
+ assert(false)
+ }
+
+ p.success(5)
+ }
+
+ testSuccess()
+
+}
+
+
+trait Exceptions extends TestBase {
+
+}
+
+
+object Test
+extends App
+with FutureCallbacks
+with FutureCombinators
+with FutureProjections
+with Promises
+with Exceptions
+{
+ System.exit(0)
+}
+
+
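[note] The TCK drives every asynchronous case through the `once` helper above: the test body receives a `done` thunk, and the harness blocks on a `SyncVar` until some callback fires, turning an asynchronous completion into a synchronous test step. A minimal standalone sketch of the same pattern, written against the modern `scala.concurrent` API (the `global` executor is an assumption of the sketch; the 2012 draft API in this patch predates it):

```scala
import scala.concurrent.{ Future, SyncVar }
import scala.concurrent.ExecutionContext.Implicits.global

object OnceDemo extends App {
  // Block the calling thread until the async body signals completion exactly once.
  def once(body: (() => Unit) => Unit): Unit = {
    val sv = new SyncVar[Boolean]
    body(() => sv.put(true))
    sv.take()
  }

  once { done =>
    Future { 42 } foreach { x =>
      assert(x == 42)
      done() // unblocks the SyncVar; the harness proceeds to the next test
    }
  }
  println("callback observed")
}
```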
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index f58f763a76..81b68f0f5d 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -156,8 +156,8 @@ x = BitSet(0, 8, 9)
y = BitSet(0, 8, 9)
x equals y: true, y equals x: true
-x = Map(C -> 3, B -> 2, A -> 1)
-y = Map(C -> 3, A -> 1, B -> 2)
+x = Map(A -> 1, C -> 3, B -> 2)
+y = Map(A -> 1, C -> 3, B -> 2)
x equals y: true, y equals x: true
x = Set(buffers, title, layers)
@@ -192,6 +192,10 @@ x = TreeSet(1, 2, 3)
y = TreeSet(1, 2, 3)
x equals y: true, y equals x: true
+x = Ctrie(1 -> one, 2 -> two, 3 -> three)
+y = Ctrie(1 -> one, 2 -> two, 3 -> three)
+x equals y: true, y equals x: true
+
x = xml:src="hello"
y = xml:src="hello"
x equals y: true, y equals x: true
@@ -279,8 +283,12 @@ x = ParArray(abc, def, etc)
y = ParArray(abc, def, etc)
x equals y: true, y equals x: true
-x = ParHashMap(1 -> 2, 2 -> 4)
-y = ParHashMap(1 -> 2, 2 -> 4)
+x = ParHashMap(2 -> 4, 1 -> 2)
+y = ParHashMap(2 -> 4, 1 -> 2)
+x equals y: true, y equals x: true
+
+x = ParCtrie(1 -> 2, 2 -> 4)
+y = ParCtrie(1 -> 2, 2 -> 4)
x equals y: true, y equals x: true
x = ParHashSet(1, 2, 3)
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index 73bed2d46b..75daa8903d 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -286,7 +286,7 @@ object Test3_mutable {
import scala.collection.mutable.{
ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList,
HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue,
- Stack, StringBuilder, WrappedArray, TreeSet}
+ Stack, StringBuilder, WrappedArray, TreeSet, Ctrie}
// in alphabetic order
try {
@@ -385,6 +385,11 @@ object Test3_mutable {
val ts1 = TreeSet[Int]() ++= Array(1, 2, 3)
val _ts1: TreeSet[Int] = read(write(ts1))
check(ts1, _ts1)
+
+ // Ctrie
+ val ct1 = Ctrie[Int, String]() ++= Array(1 -> "one", 2 -> "two", 3 -> "three")
+ val _ct1: Ctrie[Int, String] = read(write(ct1))
+ check(ct1, _ct1)
}
catch {
case e: Exception =>
@@ -608,6 +613,11 @@ object Test9_parallel {
val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm))
check(mpm, _mpm)
+ // mutable.ParCtrie
+ val mpc = mutable.ParCtrie(1 -> 2, 2 -> 4)
+ val _mpc: mutable.ParCtrie[Int, Int] = read(write(mpc))
+ check(mpc, _mpc)
+
// mutable.ParHashSet
val mps = mutable.ParHashSet(1, 2, 3)
val _mps: mutable.ParHashSet[Int] = read(write(mps))
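[note] The new serialization cases follow the suite's round-trip pattern: serialize, deserialize, and compare equality in both directions. A minimal sketch of that helper trio (the `write`/`read`/`check` names mirror the test; using `scala.collection.concurrent.TrieMap` for the since-renamed `Ctrie` is an assumption about the modern spelling):

```scala
import java.io._
import scala.collection.concurrent.TrieMap

object RoundTrip extends App {
  def write(obj: AnyRef): Array[Byte] = {
    val bytes = new ByteArrayOutputStream()
    val out   = new ObjectOutputStream(bytes)
    out.writeObject(obj); out.close()
    bytes.toByteArray
  }
  def read[A](bytes: Array[Byte]): A =
    new ObjectInputStream(new ByteArrayInputStream(bytes)).readObject().asInstanceOf[A]

  def check[A](x: A, y: A): Unit =
    assert(x == y && y == x, s"$x did not survive the round trip")

  val ct = TrieMap(1 -> "one", 2 -> "two", 3 -> "three")
  check(ct, read[TrieMap[Int, String]](write(ct)))
}
```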
diff --git a/test/files/jvm/typerep.scala b/test/files/jvm/typerep.scala
index 49a216c05c..3befc7ff3f 100644
--- a/test/files/jvm/typerep.scala
+++ b/test/files/jvm/typerep.scala
@@ -161,7 +161,7 @@ object TypeRep {
}).asInstanceOf[TypeRep[Option[A]]]
def getType[A](x: List[A])(implicit rep: TypeRep[A]): TypeRep[List[A]] = (x match {
- case h :: t => ListRep(getType(h))
+ case h :: t => ListRep(rep)
case Nil => NilRep
}).asInstanceOf[TypeRep[List[A]]]
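[note] The typerep change is a genuine bug fix: `getType` for a `List[A]` already receives the element representation as the implicit `rep`, so recursing with `getType(h)` was redundant and derived the representation from the runtime head rather than the static element type. A stripped-down sketch of the distinction (this mini `TypeRep` hierarchy is hypothetical, not the test's full one):

```scala
sealed trait TypeRep[A]
case object IntRep extends TypeRep[Int]
case class ListRep[A](elem: TypeRep[A]) extends TypeRep[List[A]]

object TypeRepDemo extends App {
  implicit val intRep: TypeRep[Int] = IntRep

  // Use the implicit evidence directly; no need to re-derive it from the head.
  def getType[A](xs: List[A])(implicit rep: TypeRep[A]): TypeRep[List[A]] =
    ListRep(rep)

  println(getType(List(1, 2, 3))) // ListRep(IntRep)
}
```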
diff --git a/test/files/neg/abstraction-from-volatile-type-error.check b/test/files/neg/abstraction-from-volatile-type-error.check
new file mode 100644
index 0000000000..34ba0551a5
--- /dev/null
+++ b/test/files/neg/abstraction-from-volatile-type-error.check
@@ -0,0 +1,4 @@
+abstraction-from-volatile-type-error.scala:9: error: illegal abstraction from value with volatile type a.Tv
+ val tv : a.Tv
+ ^
+one error found
diff --git a/test/files/neg/abstraction-from-volatile-type-error.scala b/test/files/neg/abstraction-from-volatile-type-error.scala
new file mode 100644
index 0000000000..5afcb3ec7d
--- /dev/null
+++ b/test/files/neg/abstraction-from-volatile-type-error.scala
@@ -0,0 +1,11 @@
+class A {
+ type T
+ type Tv = AnyRef with T
+}
+
+object Test {
+ type B = a.type forSome {
+ val a : A
+ val tv : a.Tv
+ }
+}
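[note] This neg test pins down the "illegal abstraction from value with volatile type" error: `a.Tv` expands to `AnyRef with T`, an intersection mentioning an abstract type member, so it is volatile and values of it cannot serve as stable prefixes inside the existential. Abstracting only over values of stable, non-volatile types remains legal, as in this sketch (names mirror the test):

```scala
import scala.language.existentials

class A {
  type T
  type Tv = AnyRef with T // volatile: an intersection involving an abstract type member
}

object Ok {
  // Abstracting over a value of the (non-volatile) type A is fine;
  // it is the test's extra `val tv: a.Tv` member that the compiler rejects.
  type B = a.type forSome { val a: A }
}
```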
diff --git a/test/files/neg/constructor-prefix-error.check b/test/files/neg/constructor-prefix-error.check
new file mode 100644
index 0000000000..87e948881b
--- /dev/null
+++ b/test/files/neg/constructor-prefix-error.check
@@ -0,0 +1,4 @@
+constructor-prefix-error.scala:6: error: Outer is not a legal prefix for a constructor
+ val x = new Outer#Inner
+ ^
+one error found
diff --git a/test/files/neg/constructor-prefix-error.scala b/test/files/neg/constructor-prefix-error.scala
new file mode 100644
index 0000000000..c2accea284
--- /dev/null
+++ b/test/files/neg/constructor-prefix-error.scala
@@ -0,0 +1,7 @@
+class Outer {
+ class Inner
+}
+
+object Test {
+ val x = new Outer#Inner
+}
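[note] The constructor-prefix error is about type projections: `Outer#Inner` means "an `Inner` of some `Outer`", so there is no outer instance available to pass to `Inner`'s constructor. Instantiation needs a concrete path-dependent prefix, as in this sketch:

```scala
class Outer {
  class Inner
}

object FixedTest extends App {
  val outer = new Outer
  val x = new outer.Inner    // path-dependent type: the prefix supplies the outer instance
  // val y = new Outer#Inner // rejected: a projection has no constructor prefix
  println(x)
}
```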
diff --git a/test/files/neg/error_dependentMethodTpeConversionToFunction.check b/test/files/neg/error_dependentMethodTpeConversionToFunction.check
new file mode 100644
index 0000000000..3496a552c4
--- /dev/null
+++ b/test/files/neg/error_dependentMethodTpeConversionToFunction.check
@@ -0,0 +1,4 @@
+error_dependentMethodTpeConversionToFunction.scala:4: error: method with dependent type (x: AnyRef)x.type cannot be converted to function value
+ val x: Any => Any = foo
+ ^
+one error found
diff --git a/test/files/neg/error_dependentMethodTpeConversionToFunction.scala b/test/files/neg/error_dependentMethodTpeConversionToFunction.scala
new file mode 100644
index 0000000000..22649e5098
--- /dev/null
+++ b/test/files/neg/error_dependentMethodTpeConversionToFunction.scala
@@ -0,0 +1,5 @@
+// test DependentMethodTpeConversionToFunctionError
+object Test {
+ def foo(x: AnyRef): x.type = x
+ val x: Any => Any = foo
+} \ No newline at end of file
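[note] This check captures why a method with a dependent result type, `(x: AnyRef)x.type`, cannot be eta-expanded: `Function1`'s result type cannot mention the argument, so no `Any => Any` can preserve `x.type`. Wrapping the call in an explicit lambda compiles, at the cost of widening away the dependency. A sketch:

```scala
object EtaDemo extends App {
  def foo(x: AnyRef): x.type = x

  // val f: Any => Any = foo            // rejected: (x: AnyRef)x.type has no Function1 equivalent
  val f: AnyRef => AnyRef = a => foo(a) // fine: the result is widened to AnyRef
  println(f("hello"))
}
```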
diff --git a/test/files/neg/error_tooManyArgsPattern.check b/test/files/neg/error_tooManyArgsPattern.check
new file mode 100644
index 0000000000..ee401ad061
--- /dev/null
+++ b/test/files/neg/error_tooManyArgsPattern.check
@@ -0,0 +1,4 @@
+error_tooManyArgsPattern.scala:3: error: too many arguments for unapply pattern, maximum = 22
+ case List(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => 7
+ ^
+one error found
diff --git a/test/files/neg/error_tooManyArgsPattern.scala b/test/files/neg/error_tooManyArgsPattern.scala
new file mode 100644
index 0000000000..d55ba61001
--- /dev/null
+++ b/test/files/neg/error_tooManyArgsPattern.scala
@@ -0,0 +1,5 @@
+object Test {
+ def test(xs: Any) = xs match { // test error message TooManyArgsPatternError
+ case List(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => 7
+ }
+}
diff --git a/test/files/neg/finitary-error.check b/test/files/neg/finitary-error.check
new file mode 100644
index 0000000000..7bc92058ca
--- /dev/null
+++ b/test/files/neg/finitary-error.check
@@ -0,0 +1,4 @@
+finitary-error.scala:3: error: class graph is not finitary because type parameter T is expansively recursive
+trait C[T] extends A[C[B[T]]]
+ ^
+one error found
diff --git a/test/files/neg/finitary-error.scala b/test/files/neg/finitary-error.scala
new file mode 100644
index 0000000000..a48fcdc70f
--- /dev/null
+++ b/test/files/neg/finitary-error.scala
@@ -0,0 +1,3 @@
+trait A[T]
+trait B[T]
+trait C[T] extends A[C[B[T]]]
diff --git a/test/files/neg/implicits.check b/test/files/neg/implicits.check
index cd9dfebf48..6d61f7f222 100644
--- a/test/files/neg/implicits.check
+++ b/test/files/neg/implicits.check
@@ -8,4 +8,7 @@ implicits.scala:46: error: type mismatch;
required: List[Mxml]
children.toList.flatMap ( e => {
^
-two errors found
+implicits.scala:66: error: could not find implicit value for parameter x: Nothing
+ foo {
+ ^
+three errors found
diff --git a/test/files/neg/implicits.scala b/test/files/neg/implicits.scala
index e908fb03e8..878d3a7c99 100644
--- a/test/files/neg/implicits.scala
+++ b/test/files/neg/implicits.scala
@@ -56,3 +56,19 @@ class Mxml {
}
}
+
+// SI-5316
+class Test3 {
+ def foo(p: => Any)(implicit x: Nothing): Unit = ()
+
+ object X
+
+ foo {
+ val a = 0
+
+ {
+ import X._
+ a
+ }
+ }
+}
diff --git a/test/files/neg/macro-argtype-mismatch.check b/test/files/neg/macro-argtype-mismatch.check
new file mode 100644
index 0000000000..dd867be804
--- /dev/null
+++ b/test/files/neg/macro-argtype-mismatch.check
@@ -0,0 +1,6 @@
+Test_2.scala:3: error: type mismatch;
+ found : String("2")
+ required: Int
+ foo("2")
+ ^
+one error found
diff --git a/test/files/neg/macro-argtype-mismatch.flags b/test/files/neg/macro-argtype-mismatch.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-argtype-mismatch.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-argtype-mismatch/Macros_1.scala b/test/files/neg/macro-argtype-mismatch/Macros_1.scala
new file mode 100644
index 0000000000..4b5f98ba37
--- /dev/null
+++ b/test/files/neg/macro-argtype-mismatch/Macros_1.scala
@@ -0,0 +1,3 @@
+object Macros {
+ def macro foo(x: Int) = x
+} \ No newline at end of file
diff --git a/test/files/neg/macro-argtype-mismatch/Test_2.scala b/test/files/neg/macro-argtype-mismatch/Test_2.scala
new file mode 100644
index 0000000000..18feb69425
--- /dev/null
+++ b/test/files/neg/macro-argtype-mismatch/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ foo("2")
+} \ No newline at end of file
diff --git a/test/files/neg/macro-noexpand.check b/test/files/neg/macro-noexpand.check
new file mode 100644
index 0000000000..c15d54bb32
--- /dev/null
+++ b/test/files/neg/macro-noexpand.check
@@ -0,0 +1,4 @@
+Test_2.scala:3: error: not found: value x
+ foo(x)
+ ^
+one error found
diff --git a/test/files/neg/macro-noexpand.flags b/test/files/neg/macro-noexpand.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-noexpand.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-noexpand/Macros_1.scala b/test/files/neg/macro-noexpand/Macros_1.scala
new file mode 100644
index 0000000000..7a6aadf6a1
--- /dev/null
+++ b/test/files/neg/macro-noexpand/Macros_1.scala
@@ -0,0 +1,3 @@
+object Macros {
+ def macro foo(x: Any) = ???
+} \ No newline at end of file
diff --git a/test/files/neg/macro-noexpand/Test_2.scala b/test/files/neg/macro-noexpand/Test_2.scala
new file mode 100644
index 0000000000..0bed592883
--- /dev/null
+++ b/test/files/neg/macro-noexpand/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ foo(x)
+} \ No newline at end of file
diff --git a/test/files/neg/macro-noncompilertree.check b/test/files/neg/macro-noncompilertree.check
new file mode 100644
index 0000000000..616765a39e
--- /dev/null
+++ b/test/files/neg/macro-noncompilertree.check
@@ -0,0 +1,6 @@
+Macros_1.scala:2: error: type mismatch;
+ found : reflect.mirror.Literal
+ required: _context.Tree
+ def macro foo = scala.reflect.mirror.Literal(scala.reflect.mirror.Constant(2))
+ ^
+one error found
diff --git a/test/files/neg/macro-noncompilertree.flags b/test/files/neg/macro-noncompilertree.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-noncompilertree.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-noncompilertree/Macros_1.scala b/test/files/neg/macro-noncompilertree/Macros_1.scala
new file mode 100644
index 0000000000..eb1253e5e9
--- /dev/null
+++ b/test/files/neg/macro-noncompilertree/Macros_1.scala
@@ -0,0 +1,3 @@
+object Macros {
+ def macro foo = scala.reflect.mirror.Literal(scala.reflect.mirror.Constant(2))
+} \ No newline at end of file
diff --git a/test/files/neg/macro-nontree.check b/test/files/neg/macro-nontree.check
new file mode 100644
index 0000000000..a1c7139580
--- /dev/null
+++ b/test/files/neg/macro-nontree.check
@@ -0,0 +1,6 @@
+Macros_1.scala:2: error: type mismatch;
+ found : Int(2)
+ required: _context.Tree
+ def macro foo = 2
+ ^
+one error found
diff --git a/test/files/neg/macro-nontree.flags b/test/files/neg/macro-nontree.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-nontree.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-nontree/Macros_1.scala b/test/files/neg/macro-nontree/Macros_1.scala
new file mode 100644
index 0000000000..2433974a85
--- /dev/null
+++ b/test/files/neg/macro-nontree/Macros_1.scala
@@ -0,0 +1,3 @@
+object Macros {
+ def macro foo = 2
+} \ No newline at end of file
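[note] These macro neg tests target the experimental `def macro` syntax that existed on trunk at the time, enabled by `-Xmacros`. The macro system that eventually shipped in 2.10 split a macro into a visible definition and a tree-level implementation. A hedged sketch of the equivalent under the released API (the `blackbox.Context` import is the 2.11+ spelling; on 2.10 it was `scala.reflect.macros.Context`):

```scala
import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

object Macros {
  // Definition: callers see an ordinary method signature.
  def foo(x: Int): Int = macro fooImpl

  // Implementation: receives and returns trees; the identity expansion here
  // reproduces the behavior of the old `def macro foo(x: Int) = x`.
  def fooImpl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = x
}
```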
diff --git a/test/files/neg/parent-inherited-twice-error.check b/test/files/neg/parent-inherited-twice-error.check
new file mode 100644
index 0000000000..521a6c19d0
--- /dev/null
+++ b/test/files/neg/parent-inherited-twice-error.check
@@ -0,0 +1,7 @@
+parent-inherited-twice-error.scala:2: error: trait A is inherited twice
+class B extends A with A
+ ^
+parent-inherited-twice-error.scala:2: error: trait A is inherited twice
+class B extends A with A
+ ^
+two errors found
diff --git a/test/files/neg/parent-inherited-twice-error.scala b/test/files/neg/parent-inherited-twice-error.scala
new file mode 100644
index 0000000000..7b433b9860
--- /dev/null
+++ b/test/files/neg/parent-inherited-twice-error.scala
@@ -0,0 +1,2 @@
+trait A
+class B extends A with A
diff --git a/test/files/neg/qualifying-class-error-1.check b/test/files/neg/qualifying-class-error-1.check
new file mode 100644
index 0000000000..c70db9ba60
--- /dev/null
+++ b/test/files/neg/qualifying-class-error-1.check
@@ -0,0 +1,4 @@
+qualifying-class-error-1.scala:2: error: this can be used only in a class, object, or template
+class B extends A(this.getClass.getName.length)
+ ^
+one error found
diff --git a/test/files/neg/qualifying-class-error-1.scala b/test/files/neg/qualifying-class-error-1.scala
new file mode 100644
index 0000000000..09152fe04c
--- /dev/null
+++ b/test/files/neg/qualifying-class-error-1.scala
@@ -0,0 +1,2 @@
+class A(val i:Int)
+class B extends A(this.getClass.getName.length)
diff --git a/test/files/neg/qualifying-class-error-2.check b/test/files/neg/qualifying-class-error-2.check
new file mode 100644
index 0000000000..50c2759685
--- /dev/null
+++ b/test/files/neg/qualifying-class-error-2.check
@@ -0,0 +1,4 @@
+qualifying-class-error-2.scala:9: error: A is not an enclosing class
+ protected[A] def f() {}
+ ^
+one error found
diff --git a/test/files/neg/qualifying-class-error-2.scala b/test/files/neg/qualifying-class-error-2.scala
new file mode 100644
index 0000000000..d3aa8664bd
--- /dev/null
+++ b/test/files/neg/qualifying-class-error-2.scala
@@ -0,0 +1,11 @@
+package A {
+ trait X {
+ protected[A] def f()
+ }
+}
+
+package B {
+ class Y extends A.X {
+ protected[A] def f() {}
+ }
+}
diff --git a/test/files/neg/reify_ann2a.check b/test/files/neg/reify_ann2a.check
new file mode 100644
index 0000000000..2afe37e1d8
--- /dev/null
+++ b/test/files/neg/reify_ann2a.check
@@ -0,0 +1,4 @@
+reify_ann2a.scala:9: error: exception during macro expansion: implementation restriction: cannot reify annotation @ann(immutable.this.List.apply[String]("1a")) which involves a symbol declared inside the block being reified
+ val tree = scala.reflect.Code.lift{
+ ^
+one error found
diff --git a/test/files/neg/reify_ann2a.scala b/test/files/neg/reify_ann2a.scala
new file mode 100644
index 0000000000..8de0984074
--- /dev/null
+++ b/test/files/neg/reify_ann2a.scala
@@ -0,0 +1,30 @@
+import scala.reflect._
+import scala.reflect.api._
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ // test 1: reify
+ val tree = scala.reflect.Code.lift{
+ class ann(bar: List[String]) extends StaticAnnotation
+
+ @ann(bar=List("1a")) @ann(bar=List("1b")) class C[@ann(bar=List("2a")) @ann(bar=List("2b")) T](@ann(bar=List("3a")) @ann(bar=List("3b")) x: T @ann(bar=List("4a")) @ann(bar=List("4b"))) {
+ @ann(bar=List("5a")) @ann(bar=List("5b")) def f(x: Int @ann(bar=List("6a")) @ann(bar=List("6b"))) = {
+ @ann(bar=List("7a")) @ann(bar=List("7b")) val r = (x + 3): @ann(bar=List("8a")) @ann(bar=List("8b"))
+ val s = 4: Int @ann(bar=List("9a")) @ann(bar=List("9b"))
+ r + s
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.runExpr(tree)
+} \ No newline at end of file
diff --git a/test/files/neg/reify_ann2b.check b/test/files/neg/reify_ann2b.check
new file mode 100644
index 0000000000..ceb70689f1
--- /dev/null
+++ b/test/files/neg/reify_ann2b.check
@@ -0,0 +1,7 @@
+reify_ann2b.scala:10: error: inner classes cannot be classfile annotations
+ class ann(bar: String) extends ClassfileAnnotation
+ ^
+reify_ann2b.scala:9: error: exception during macro expansion: implementation restriction: cannot reify annotation @ann(bar = "1a") which involves a symbol declared inside the block being reified
+ val tree = scala.reflect.Code.lift{
+ ^
+two errors found
diff --git a/test/files/neg/reify_ann2b.scala b/test/files/neg/reify_ann2b.scala
new file mode 100644
index 0000000000..b43567c2a7
--- /dev/null
+++ b/test/files/neg/reify_ann2b.scala
@@ -0,0 +1,30 @@
+import scala.reflect._
+import scala.reflect.api._
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ // test 1: reify
+ val tree = scala.reflect.Code.lift{
+ class ann(bar: String) extends ClassfileAnnotation
+
+ @ann(bar="1a") @ann(bar="1b") class C[@ann(bar="2a") @ann(bar="2b") T](@ann(bar="3a") @ann(bar="3b") x: T @ann(bar="4a") @ann(bar="4b")) {
+ @ann(bar="5a") @ann(bar="5b") def f(x: Int @ann(bar="6a") @ann(bar="6b")) = {
+ @ann(bar="7a") @ann(bar="7b") val r = (x + 3): @ann(bar="8a") @ann(bar="8b")
+ val s = 4: Int @ann(bar="9a") @ann(bar="9b")
+ r + s
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.runExpr(tree)
+} \ No newline at end of file
diff --git a/test/files/neg/t200.check b/test/files/neg/t200.check
index 78701f8533..3ef6665fe5 100644
--- a/test/files/neg/t200.check
+++ b/test/files/neg/t200.check
@@ -1,4 +1,4 @@
-t200.scala:7: error: method foo is defined twice
+t200.scala:7: error: method foo is defined twice in t200.scala
def foo: Int;
^
one error found
diff --git a/test/files/neg/t2779.check b/test/files/neg/t2779.check
index 4f94a780a1..d642541e3e 100644
--- a/test/files/neg/t2779.check
+++ b/test/files/neg/t2779.check
@@ -1,4 +1,4 @@
-t2779.scala:16: error: method f is defined twice
+t2779.scala:16: error: method f is defined twice in t2779.scala
override def f = List(M1)
^
one error found
diff --git a/test/files/neg/t278.check b/test/files/neg/t278.check
index 675ef910ee..0c2dfeb67a 100644
--- a/test/files/neg/t278.check
+++ b/test/files/neg/t278.check
@@ -4,7 +4,7 @@ t278.scala:5: error: overloaded method value a with alternatives:
does not take type parameters
println(a[A])
^
-t278.scala:4: error: method a is defined twice
+t278.scala:4: error: method a is defined twice in t278.scala
def a = (p:A) => ()
^
two errors found
diff --git a/test/files/neg/t3275.check b/test/files/neg/t3275.check
new file mode 100644
index 0000000000..117c792321
--- /dev/null
+++ b/test/files/neg/t3275.check
@@ -0,0 +1,4 @@
+t3275.scala:2: error: @tailrec annotated method contains no recursive calls
+ @annotation.tailrec def foo() = 5
+ ^
+one error found
diff --git a/test/files/neg/t3275.scala b/test/files/neg/t3275.scala
new file mode 100644
index 0000000000..18e38a1a97
--- /dev/null
+++ b/test/files/neg/t3275.scala
@@ -0,0 +1,3 @@
+object Test {
+ @annotation.tailrec def foo() = 5
+}
diff --git a/test/files/neg/t414.check b/test/files/neg/t414.check
index a855497648..e15dbaea71 100644
--- a/test/files/neg/t414.check
+++ b/test/files/neg/t414.check
@@ -1,5 +1,5 @@
t414.scala:5: error: pattern type is incompatible with expected type;
- found : object Empty
+ found : Empty.type
required: IntMap[a]
Note: if you intended to match against the class, try `case _: Empty[_]` or `case Empty()`
case Empty =>
diff --git a/test/files/neg/t452.check b/test/files/neg/t452.check
index 85197af21b..aac663068e 100644
--- a/test/files/neg/t452.check
+++ b/test/files/neg/t452.check
@@ -1,5 +1,5 @@
t452.scala:3: error: type mismatch;
- found : Test.type (with underlying type object Test)
+ found : Test.type
required: Test.Foo
def this() = this(this);
^
diff --git a/test/files/neg/t4879.check b/test/files/neg/t4879.check
index 49f3c73cf7..21cd329640 100644
--- a/test/files/neg/t4879.check
+++ b/test/files/neg/t4879.check
@@ -1,11 +1,11 @@
t4879.scala:6: error: pattern type is incompatible with expected type;
- found : object C
+ found : C.type
required: C
Note: if you intended to match against the class, try `case _: C` or `case C(_)`
case C => true
^
t4879.scala:10: error: pattern type is incompatible with expected type;
- found : object D
+ found : D.type
required: D[T,U,V]
Note: if you intended to match against the class, try `case _: D[_,_,_]` or `case D(_,_,_)`
case D => true
diff --git a/test/files/neg/t5189.check b/test/files/neg/t5189.check
new file mode 100644
index 0000000000..7762f465dc
--- /dev/null
+++ b/test/files/neg/t5189.check
@@ -0,0 +1,6 @@
+t5189.scala:3: error: type mismatch;
+ found : Nothing => Any
+ required: Any => Any
+ def f(x: Any): Any => Any = x match { case Foo(bar) => bar }
+ ^
+one error found \ No newline at end of file
diff --git a/test/files/neg/t5189.scala b/test/files/neg/t5189.scala
new file mode 100644
index 0000000000..19e8e74667
--- /dev/null
+++ b/test/files/neg/t5189.scala
@@ -0,0 +1,5 @@
+class TestNeg1 {
+ case class Foo[T, U](f: T => U)
+ def f(x: Any): Any => Any = x match { case Foo(bar) => bar }
+ // uh-oh, Any => Any should be Nothing => Any.
+}
diff --git a/test/files/neg/t5189b.check b/test/files/neg/t5189b.check
new file mode 100644
index 0000000000..7f78cbb438
--- /dev/null
+++ b/test/files/neg/t5189b.check
@@ -0,0 +1,8 @@
+t5189b.scala:25: error: type mismatch;
+ found : TestNeg.Wrapped[?T2] where type ?T2 <: T
+ required: TestNeg.Wrapped[T]
+Note: ?T2 <: T, but class Wrapped is invariant in type W.
+You may wish to define W as +W instead. (SLS 4.5)
+ case Wrapper/*[_ <: T ]*/(wrapped) => wrapped // : Wrapped[_ <: T], which is a subtype of Wrapped[T] if and only if Wrapped is covariant in its type parameter
+ ^
+one error found
diff --git a/test/files/neg/t5189b.scala b/test/files/neg/t5189b.scala
new file mode 100644
index 0000000000..1750f14084
--- /dev/null
+++ b/test/files/neg/t5189b.scala
@@ -0,0 +1,62 @@
+class TestPos {
+ class AbsWrapperCov[+A]
+ case class Wrapper[B](x: B) extends AbsWrapperCov[B]
+
+ def unwrap[T](x: AbsWrapperCov[T]): T = x match {
+ case Wrapper/*[_ <: T ]*/(x) => x // _ <: T, which is a subtype of T
+ }
+}
+
+object TestNeg extends App {
+ class AbsWrapperCov[+A]
+ case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
+
+ /*
+  when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
+  we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome { type Tactual <: T },
+  since AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly; all we know is its upper bound
+
+ since method application is the only way to generate this slack between run-time and compile-time types,
+ we'll simply replace the skolems that represent method type parameters as seen from the method's body by
+ other skolems that are (upper/lower)-bounded by the type-parameter skolems
+ (depending on whether the skolem appears in a covariant/contravariant position)
+ */
+ def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
+ case Wrapper/*[_ <: T ]*/(wrapped) => wrapped // : Wrapped[_ <: T], which is a subtype of Wrapped[T] if and only if Wrapped is covariant in its type parameter
+ }
+
+ class Wrapped[W](var cell: W) // must be invariant (to trigger the bug)
+
+ // class A { def imNotAB = println("notB")}
+ // class B
+ //
+ // val w = new Wrapped(new A)
+ // unwrap[Any](Wrapper(w)).cell = new B
+ // w.cell.imNotAB
+}
+
+// class TestPos1 {
+// class Base[T]
+// case class C[T](x: T) extends Base[T]
+// def foo[T](b: Base[T]): T = b match { case C(x) => x }
+//
+// case class Span[K <: Ordered[K]](low: Option[K], high: Option[K]) extends Function1[K, Boolean] {
+// override def equals(x$1: Any): Boolean = x$1 match {
+// case Span((low$0 @ _), (high$0 @ _)) if low$0.equals(low).$amp$amp(high$0.equals(high)) => true
+// case _ => false
+// }
+// def apply(k: K): Boolean = this match {
+// case Span(Some(low), Some(high)) => (k >= low && k <= high)
+// case Span(Some(low), None) => (k >= low)
+// case Span(None, Some(high)) => (k <= high)
+// case _ => false
+// }
+// }
+// }
+//
+// class TestNeg1 {
+// case class Foo[T, U](f: T => U)
+// def f(x: Any): Any => Any = x match { case Foo(bar) => bar }
+// // uh-oh, Any => Any should be Nothing => Any.
+// }
+
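[note] The comment block above is the heart of SI-5189: because `AbsWrapperCov` is covariant, a scrutinee typed `AbsWrapperCov[T]` may really be an `AbsWrapperCov[S]` for some `S <: T`, so the match may only conclude `Wrapped[_ <: T]`, never `Wrapped[T]`; the invariant `Wrapped` is what makes the difference observable. The commented-out exploit, spelled out as a sketch (it would typecheck only if the inference were unsound, and would then fail at runtime):

```scala
object Unsound {
  class AbsWrapperCov[+A]
  case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
  class Wrapped[W](var cell: W) // invariant: a Wrapped[A] is not a Wrapped[Any]

  class A { def imNotAB = println("notB") }
  class B

  // If unwrap could return Wrapped[T] exactly, this would be accepted:
  //   val w = new Wrapped(new A)
  //   unwrap[Any](Wrapper(w)).cell = new B // writes a B into a Wrapped[A]
  //   w.cell.imNotAB                       // and this would crash at runtime
}
```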
diff --git a/test/files/neg/t5358.check b/test/files/neg/t5358.check
new file mode 100644
index 0000000000..59e83bba2f
--- /dev/null
+++ b/test/files/neg/t5358.check
@@ -0,0 +1,7 @@
+t5358.scala:3: error: class C inherits conflicting members:
+ method hi in trait A of type => String and
+ method hi in trait B of type => String
+(Note: this can be resolved by declaring an override in class C.)
+class C extends A with B
+ ^
+one error found
diff --git a/test/files/neg/t5358.scala b/test/files/neg/t5358.scala
new file mode 100644
index 0000000000..13d827ed82
--- /dev/null
+++ b/test/files/neg/t5358.scala
@@ -0,0 +1,4 @@
+trait A { def hi = "A" }
+trait B { def hi = "B" }
+class C extends A with B
+
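[note] The t5358 check text names its own fix: two concrete, equally specific `hi` members can only be reconciled by an explicit override in the subclass, which may delegate to a chosen parent via `super[...]`. A sketch:

```scala
trait A { def hi = "A" }
trait B { def hi = "B" }

class C extends A with B {
  override def hi = super[A].hi + super[B].hi // explicit override resolves the conflict
}

object Run extends App { println((new C).hi) } // prints AB
```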
diff --git a/test/files/neg/t5429.check b/test/files/neg/t5429.check
new file mode 100644
index 0000000000..1b89c59587
--- /dev/null
+++ b/test/files/neg/t5429.check
@@ -0,0 +1,132 @@
+t5429.scala:20: error: overriding value value in class A of type Int;
+ object value needs `override' modifier
+ object value // fail
+ ^
+t5429.scala:21: error: overriding lazy value lazyvalue in class A of type Int;
+ object lazyvalue needs `override' modifier
+ object lazyvalue // fail
+ ^
+t5429.scala:22: error: overriding method nullary in class A of type => Int;
+ object nullary needs `override' modifier
+ object nullary // fail
+ ^
+t5429.scala:23: error: overriding method emptyArg in class A of type ()Int;
+ object emptyArg needs `override' modifier
+ object emptyArg // fail
+ ^
+t5429.scala:27: error: overriding value value in class A0 of type Any;
+ object value needs `override' modifier
+ object value // fail
+ ^
+t5429.scala:28: error: overriding lazy value lazyvalue in class A0 of type Any;
+ object lazyvalue needs `override' modifier
+ object lazyvalue // fail
+ ^
+t5429.scala:29: error: overriding method nullary in class A0 of type => Any;
+ object nullary needs `override' modifier
+ object nullary // fail
+ ^
+t5429.scala:30: error: overriding method emptyArg in class A0 of type ()Any;
+ object emptyArg needs `override' modifier
+ object emptyArg // fail
+ ^
+t5429.scala:35: error: overriding value value in class A of type Int;
+ object value has incompatible type
+ override object value // fail
+ ^
+t5429.scala:36: error: overriding lazy value lazyvalue in class A of type Int;
+ object lazyvalue must be declared lazy to override a concrete lazy value
+ override object lazyvalue // fail
+ ^
+t5429.scala:37: error: overriding method nullary in class A of type => Int;
+ object nullary has incompatible type
+ override object nullary // fail
+ ^
+t5429.scala:38: error: overriding method emptyArg in class A of type ()Int;
+ object emptyArg has incompatible type
+ override object emptyArg // fail
+ ^
+t5429.scala:39: error: object oneArg overrides nothing
+ override object oneArg // fail
+ ^
+t5429.scala:43: error: overriding lazy value lazyvalue in class A0 of type Any;
+ object lazyvalue must be declared lazy to override a concrete lazy value
+ override object lazyvalue // !!! this fails, but should succeed (lazy over lazy)
+ ^
+t5429.scala:46: error: object oneArg overrides nothing
+ override object oneArg // fail
+ ^
+t5429.scala:50: error: overriding value value in class A of type Int;
+ value value needs `override' modifier
+ val value = 0 // fail
+ ^
+t5429.scala:51: error: overriding lazy value lazyvalue in class A of type Int;
+ value lazyvalue needs `override' modifier
+ val lazyvalue = 0 // fail
+ ^
+t5429.scala:52: error: overriding method nullary in class A of type => Int;
+ value nullary needs `override' modifier
+ val nullary = 5 // fail
+ ^
+t5429.scala:53: error: overriding method emptyArg in class A of type ()Int;
+ value emptyArg needs `override' modifier
+ val emptyArg = 10 // fail
+ ^
+t5429.scala:58: error: overriding lazy value lazyvalue in class A0 of type Any;
+ value lazyvalue must be declared lazy to override a concrete lazy value
+ override val lazyvalue = 0 // fail (non-lazy)
+ ^
+t5429.scala:61: error: value oneArg overrides nothing
+ override val oneArg = 15 // fail
+ ^
+t5429.scala:65: error: overriding value value in class A of type Int;
+ method value needs `override' modifier
+ def value = 0 // fail
+ ^
+t5429.scala:66: error: overriding lazy value lazyvalue in class A of type Int;
+ method lazyvalue needs `override' modifier
+ def lazyvalue = 2 // fail
+ ^
+t5429.scala:67: error: overriding method nullary in class A of type => Int;
+ method nullary needs `override' modifier
+ def nullary = 5 // fail
+ ^
+t5429.scala:68: error: overriding method emptyArg in class A of type ()Int;
+ method emptyArg needs `override' modifier
+ def emptyArg = 10 // fail
+ ^
+t5429.scala:72: error: overriding value value in class A0 of type Any;
+ method value needs to be a stable, immutable value
+ override def value = 0 // fail
+ ^
+t5429.scala:73: error: overriding lazy value lazyvalue in class A0 of type Any;
+ method lazyvalue needs to be a stable, immutable value
+ override def lazyvalue = 2 // fail
+ ^
+t5429.scala:76: error: method oneArg overrides nothing
+ override def oneArg = 15 // fail
+ ^
+t5429.scala:80: error: overriding value value in class A of type Int;
+ lazy value value needs `override' modifier
+ lazy val value = 0 // fail
+ ^
+t5429.scala:81: error: overriding lazy value lazyvalue in class A of type Int;
+ lazy value lazyvalue needs `override' modifier
+ lazy val lazyvalue = 2 // fail
+ ^
+t5429.scala:82: error: overriding method nullary in class A of type => Int;
+ lazy value nullary needs `override' modifier
+ lazy val nullary = 5 // fail
+ ^
+t5429.scala:83: error: overriding method emptyArg in class A of type ()Int;
+ lazy value emptyArg needs `override' modifier
+ lazy val emptyArg = 10 // fail
+ ^
+t5429.scala:87: error: overriding value value in class A0 of type Any;
+ lazy value value cannot override a concrete non-lazy value
+ override lazy val value = 0 // fail (strict over lazy)
+ ^
+t5429.scala:91: error: value oneArg overrides nothing
+ override lazy val oneArg = 15 // fail
+ ^
+34 errors found
diff --git a/test/files/neg/t5429.scala b/test/files/neg/t5429.scala
new file mode 100644
index 0000000000..1cd4dcd032
--- /dev/null
+++ b/test/files/neg/t5429.scala
@@ -0,0 +1,93 @@
+// /scala/trac/5429/a.scala
+// Wed Feb 1 08:05:27 PST 2012
+
+class A {
+ val value = 0
+ lazy val lazyvalue = 2
+ def nullary = 5
+ def emptyArg() = 10
+ def oneArg(x: String) = 15
+}
+class A0 {
+ val value: Any = 0
+ lazy val lazyvalue: Any = 2
+ def nullary: Any = 5
+ def emptyArg(): Any = 10
+ def oneArg(x: String): Any = 15
+}
+
+class B extends A {
+ object value // fail
+ object lazyvalue // fail
+ object nullary // fail
+ object emptyArg // fail
+ object oneArg // overload
+}
+class B0 extends A0 {
+ object value // fail
+ object lazyvalue // fail
+ object nullary // fail
+ object emptyArg // fail
+ object oneArg // overload
+}
+
+class C extends A {
+ override object value // fail
+ override object lazyvalue // fail
+ override object nullary // fail
+ override object emptyArg // fail
+ override object oneArg // fail
+}
+class C0 extends A0 {
+ override object value // !!! this succeeds, but should fail (lazy over strict)
+ override object lazyvalue // !!! this fails, but should succeed (lazy over lazy)
+ override object nullary // override
+ override object emptyArg // override
+ override object oneArg // fail
+}
+
+class D extends A {
+ val value = 0 // fail
+ val lazyvalue = 0 // fail
+ val nullary = 5 // fail
+ val emptyArg = 10 // fail
+ val oneArg = 15 // overload
+}
+class D0 extends A0 {
+ override val value = 0 // override
+ override val lazyvalue = 0 // fail (non-lazy)
+ override val nullary = 5 // override
+ override val emptyArg = 10 // override
+ override val oneArg = 15 // fail
+}
+
+class E extends A {
+ def value = 0 // fail
+ def lazyvalue = 2 // fail
+ def nullary = 5 // fail
+ def emptyArg = 10 // fail
+ def oneArg = 15 // overload
+}
+class E0 extends A0 {
+ override def value = 0 // fail
+ override def lazyvalue = 2 // fail
+ override def nullary = 5 // override
+ override def emptyArg = 10 // override
+ override def oneArg = 15 // fail
+}
+
+class F extends A {
+ lazy val value = 0 // fail
+ lazy val lazyvalue = 2 // fail
+ lazy val nullary = 5 // fail
+ lazy val emptyArg = 10 // fail
+ lazy val oneArg = 15 // overload
+}
+class F0 extends A0 {
+ override lazy val value = 0 // fail (strict over lazy)
+ override lazy val lazyvalue = 2 // override (lazy over lazy)
+ override lazy val nullary = 5 // override
+ override lazy val emptyArg = 10 // override
+ override lazy val oneArg = 15 // fail
+}
+
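[note] The t5429 matrix exercises every combination of {object, val, def, lazy val} overriding {val, lazy val, nullary def, empty-paren def}, with and without `override`. The legal cases reduce to the usual stability rules: a stable `val` may implement a nullary `def`, but a `def` may never override a `val`. A small sketch of those two poles:

```scala
class A0 {
  def nullary: Any = 5
  val value: Any = 0
}

class D0 extends A0 {
  override val nullary = 5 // ok: a stable val can implement a nullary def
  // override def value = 0 // rejected: a def is not a stable, immutable value
}
```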
diff --git a/test/files/neg/t5452.check b/test/files/neg/t5452.check
new file mode 100644
index 0000000000..2f35a45509
--- /dev/null
+++ b/test/files/neg/t5452.check
@@ -0,0 +1,8 @@
+t5452.scala:28: error: overloaded method value apply with alternatives:
+ ()Queryable[CoffeesTable] <and>
+ (t: Tree)(implicit evidence$2: Manifest[CoffeesTable])Nothing <and>
+ (implicit evidence$1: Manifest[CoffeesTable])Nothing
+ cannot be applied to (Queryable[CoffeesTable])
+ Queryable[CoffeesTable]( q.treeFilter(null) )
+ ^
+one error found
diff --git a/test/files/neg/t5452.scala b/test/files/neg/t5452.scala
new file mode 100644
index 0000000000..1032db7a4b
--- /dev/null
+++ b/test/files/neg/t5452.scala
@@ -0,0 +1,29 @@
+// /scala/trac/5452/a.scala
+// Mon Feb 13 22:52:36 PST 2012
+
+// import scala.reflect.mirror._
+
+trait Tree
+
+object Bip {
+ def ??? = sys.error("")
+}
+import Bip._
+
+case class Queryable[T]() {
+ def treeFilter( t:Tree ) : Queryable[T] = ???
+}
+
+object Queryable {
+ def apply[T:Manifest] = ???
+ def apply[T:Manifest]( t:Tree ) = ???
+}
+
+trait CoffeesTable{
+ def sales : Int
+}
+
+object Test extends App{
+ val q = new Queryable[CoffeesTable]
+ Queryable[CoffeesTable]( q.treeFilter(null) )
+}
diff --git a/test/files/neg/t5455.check b/test/files/neg/t5455.check
new file mode 100644
index 0000000000..788daf99fa
--- /dev/null
+++ b/test/files/neg/t5455.check
@@ -0,0 +1,4 @@
+t5455.scala:4: error: lazy vals are not tailcall transformed
+ @annotation.tailrec final lazy val bar: Thing[Int] = {
+ ^
+one error found
diff --git a/test/files/neg/t5455.scala b/test/files/neg/t5455.scala
new file mode 100644
index 0000000000..22d6c442c9
--- /dev/null
+++ b/test/files/neg/t5455.scala
@@ -0,0 +1,16 @@
+trait Test {
+ def root: Test
+
+ @annotation.tailrec final lazy val bar: Thing[Int] = {
+ if (this eq root)
+ Thing(() => System.identityHashCode(bar))
+ else
+ root.bar
+ }
+
+ def f = bar.f()
+}
+
+case class Thing[A](f: () => A) {
+ override def toString = "" + f()
+}
diff --git a/test/files/neg/t5493.check b/test/files/neg/t5493.check
new file mode 100644
index 0000000000..78b1536bc7
--- /dev/null
+++ b/test/files/neg/t5493.check
@@ -0,0 +1,4 @@
+t5493.scala:2: error: not found: value iDontExist
+ def meh(xs: Any): Any = xs :: iDontExist :: Nil
+ ^
+one error found
diff --git a/test/files/neg/t5493.scala b/test/files/neg/t5493.scala
new file mode 100644
index 0000000000..459cf53bbd
--- /dev/null
+++ b/test/files/neg/t5493.scala
@@ -0,0 +1,3 @@
+object Test {
+ def meh(xs: Any): Any = xs :: iDontExist :: Nil
+}
diff --git a/test/files/neg/t5497.check b/test/files/neg/t5497.check
new file mode 100644
index 0000000000..fef6d38da0
--- /dev/null
+++ b/test/files/neg/t5497.check
@@ -0,0 +1,4 @@
+t5497.scala:3: error: not found: value sq
+ case other => println(null.asInstanceOf[sq.Filter].tableName)
+ ^
+one error found
diff --git a/test/files/neg/t5497.scala b/test/files/neg/t5497.scala
new file mode 100644
index 0000000000..40d47de12d
--- /dev/null
+++ b/test/files/neg/t5497.scala
@@ -0,0 +1,5 @@
+object TestQueryable extends App{
+ ({
+ case other => println(null.asInstanceOf[sq.Filter].tableName)
+ } : Any => Unit)(null)
+}
diff --git a/test/files/neg/t5529.check b/test/files/neg/t5529.check
new file mode 100644
index 0000000000..78a26aeb50
--- /dev/null
+++ b/test/files/neg/t5529.check
@@ -0,0 +1,12 @@
+t5529.scala:12: error: File is already defined as class File
+ type File
+ ^
+t5529.scala:10: error: class type required but test.Test.File found
+ sealed class Dir extends File { }
+ ^
+t5529.scala:10: error: illegal inheritance; super<none>
+ is not a subclass of the superclass Object
+ of the mixin trait ScalaObject
+ sealed class Dir extends File { }
+ ^
+three errors found
diff --git a/test/files/neg/t5529.scala b/test/files/neg/t5529.scala
new file mode 100644
index 0000000000..033009a8a6
--- /dev/null
+++ b/test/files/neg/t5529.scala
@@ -0,0 +1,13 @@
+// /scala/trac/5529/a.scala
+// Tue Feb 28 13:11:28 PST 2012
+
+package test;
+
+object Test {
+ sealed class File {
+ val i = 1
+ }
+ sealed class Dir extends File { }
+
+ type File
+}
diff --git a/test/files/neg/t5553_1.check b/test/files/neg/t5553_1.check
new file mode 100644
index 0000000000..afd6489888
--- /dev/null
+++ b/test/files/neg/t5553_1.check
@@ -0,0 +1,54 @@
+t5553_1.scala:18: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (z: String)Base[T]
+and method apply in object Foo1 of type (a: Int)Base[T]
+match expected type ?
+ def test1[T] = Foo1[T]
+ ^
+t5553_1.scala:19: error: type mismatch;
+ found : [T](z: String)Base[T] <and> (a: Int)Base[T]
+ required: Int
+ def test2[T]: Int = Foo1[T]
+ ^
+t5553_1.scala:20: error: type mismatch;
+ found : [T(in method apply)](z: String)Base[T(in method apply)] <and> (a: Int)Base[T(in method apply)]
+ required: Base[T(in method test3)]
+ def test3[T]: Base[T] = Foo1[T]
+ ^
+t5553_1.scala:24: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (z: String)Base[T]
+and method apply in object Foo2 of type (a: Int)Base[T]
+match expected type ?
+ def test4[T] = Foo2[T]
+ ^
+t5553_1.scala:25: error: type mismatch;
+ found : [T](z: String)Base[T] <and> (a: Int)Base[T]
+ required: Int
+ def test5[T]: Int = Foo2[T]
+ ^
+t5553_1.scala:26: error: type mismatch;
+ found : [T(in method apply)](z: String)Base[T(in method apply)] <and> (a: Int)Base[T(in method apply)]
+ required: Base[T(in method test6)]
+ def test6[T]: Base[T] = Foo2[T]
+ ^
+t5553_1.scala:30: error: ambiguous reference to overloaded definition,
+both method apply in object Foo3 of type (z: String)String
+and method apply in object Foo3 of type (a: Int)Base[T]
+match expected type ?
+ def test7[T] = Foo3[T]
+ ^
+t5553_1.scala:31: error: type mismatch;
+ found : [T](z: String)String <and> (a: Int)Base[T]
+ required: String
+ def test8[T]: String = Foo3[T]
+ ^
+t5553_1.scala:32: error: type mismatch;
+ found : [T](z: String)String <and> (a: Int)Base[T]
+ required: Int
+ def test9[T]: Int = Foo3[T]
+ ^
+t5553_1.scala:33: error: type mismatch;
+ found : [T(in method apply)](z: String)String <and> (a: Int)Base[T(in method apply)]
+ required: Base[T(in method test10)]
+ def test10[T]: Base[T] = Foo3[T]
+ ^
+10 errors found
diff --git a/test/files/neg/t5553_1.scala b/test/files/neg/t5553_1.scala
new file mode 100644
index 0000000000..32d61ec852
--- /dev/null
+++ b/test/files/neg/t5553_1.scala
@@ -0,0 +1,34 @@
+class Base[T]
+
+object Foo1 {
+ def apply[T](a: Int): Base[T] = new Base[T]
+ def apply[T](z: String): Base[T] = new Base[T]
+}
+
+object Foo2 {
+ def apply[T](a: Int): Base[T] = new Base[T]
+ def apply[T](z: String="abc"): Base[T] = new Base[T]
+}
+
+object Foo3 {
+ def apply[T](a: Int): Base[T] = new Base[T]
+ def apply[T](z: String="abc"): String = z
+}
+object Test {
+ def test1[T] = Foo1[T]
+ def test2[T]: Int = Foo1[T]
+ def test3[T]: Base[T] = Foo1[T]
+}
+
+object Test2 {
+ def test4[T] = Foo2[T]
+ def test5[T]: Int = Foo2[T]
+ def test6[T]: Base[T] = Foo2[T]
+}
+
+object Test3{
+ def test7[T] = Foo3[T]
+ def test8[T]: String = Foo3[T]
+ def test9[T]: Int = Foo3[T]
+ def test10[T]: Base[T] = Foo3[T]
+}
diff --git a/test/files/neg/t5553_2.check b/test/files/neg/t5553_2.check
new file mode 100644
index 0000000000..599fdb0523
--- /dev/null
+++ b/test/files/neg/t5553_2.check
@@ -0,0 +1,50 @@
+t5553_2.scala:27: error: type mismatch;
+ found : Base[T]
+ required: Int
+ def test4[T]: Int = Foo1[T](1)
+ ^
+t5553_2.scala:34: error: type mismatch;
+ found : String
+ required: Base[T]
+ def test7[T]: Base[T] = Foo2[T]
+ ^
+t5553_2.scala:35: error: type mismatch;
+ found : String
+ required: Int
+ def test8[T]: Int = Foo2[T]
+ ^
+t5553_2.scala:40: error: type mismatch;
+ found : String
+ required: Int
+ def test9[T]: Int = Foo3[T]
+ ^
+t5553_2.scala:41: error: type mismatch;
+ found : String
+ required: Base[T]
+ def test10[T]: Base[T] = Foo3[T]
+ ^
+t5553_2.scala:47: error: could not find implicit value for parameter z: String
+ def test13[T]: Int = Foo3[T]
+ ^
+t5553_2.scala:48: error: could not find implicit value for parameter z: String
+ def test14[T]: Base[T] = Foo3[T]
+ ^
+t5553_2.scala:49: error: could not find implicit value for parameter z: String
+ def test15[T]: String = Foo3[T]
+ ^
+t5553_2.scala:50: error: could not find implicit value for parameter z: String
+ def test16[T] = Foo3[T]
+ ^
+t5553_2.scala:54: error: ambiguous reference to overloaded definition,
+both method apply in object Foo4 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo4 of type (x: Int)Base[T]
+match argument types (Int)
+ def test17[T] = Foo4[T](1)
+ ^
+t5553_2.scala:55: error: ambiguous reference to overloaded definition,
+both method apply in object Foo4 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo4 of type (x: Int)Base[T]
+match argument types (Int) and expected result type Base[T]
+ def test18[T]: Base[T] = Foo4[T](1)
+ ^
+11 errors found
diff --git a/test/files/neg/t5553_2.scala b/test/files/neg/t5553_2.scala
new file mode 100644
index 0000000000..16958aec8e
--- /dev/null
+++ b/test/files/neg/t5553_2.scala
@@ -0,0 +1,59 @@
+class Base[T]
+
+object Foo1 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](x: Int, z: String="abc"): String = z
+}
+
+object Foo2 {
+ def apply[T](a: Int): Base[T] = new Base[T]
+ def apply[T]: String = "abc"
+}
+
+object Foo3 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](implicit z: String): String = z
+}
+
+object Foo4 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](x: Int)(implicit z: String): Base[T] = new Base[T]
+}
+
+object Test1 {
+ def test1[T] = Foo1[T](1)
+ def test2[T]: String = Foo1[T](1)
+ def test3[T]: Base[T] = Foo1[T](1)
+ def test4[T]: Int = Foo1[T](1)
+
+}
+
+object Test2 {
+ def test5[T] = Foo2[T]
+ def test6[T]: String = Foo2[T]
+ def test7[T]: Base[T] = Foo2[T]
+ def test8[T]: Int = Foo2[T]
+}
+
+object Test3 {
+ implicit val v: String = "abc"
+ def test9[T]: Int = Foo3[T]
+ def test10[T]: Base[T] = Foo3[T]
+ def test11[T]: String = Foo3[T]
+ def test12[T] = Foo3[T]
+}
+
+object Test4 {
+ def test13[T]: Int = Foo3[T]
+ def test14[T]: Base[T] = Foo3[T]
+ def test15[T]: String = Foo3[T]
+ def test16[T] = Foo3[T]
+}
+
+object Test5 {
+ def test17[T] = Foo4[T](1)
+ def test18[T]: Base[T] = Foo4[T](1)
+ //def test19[T]: String = Foo4[T](1) // #5554
+}
+
+
diff --git a/test/files/neg/t5554.check b/test/files/neg/t5554.check
new file mode 100644
index 0000000000..8f657fd32f
--- /dev/null
+++ b/test/files/neg/t5554.check
@@ -0,0 +1,67 @@
+t5554.scala:14: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test1[T]: Int = Foo1[T](1)
+ ^
+t5554.scala:16: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test3[T]: String = Foo1[T](1)
+ ^
+t5554.scala:17: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test4[T] = Foo1[T](1)
+ ^
+t5554.scala:22: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test5[T]: Int = Foo1[T](1)
+ ^
+t5554.scala:25: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test8[T] = Foo1[T](1)
+ ^
+t5554.scala:29: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int)
+ def test9[T]: String = Foo2[T](1)
+ ^
+t5554.scala:30: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int) and expected result type Base[T]
+ def test10[T]: Base[T] = Foo2[T](1)
+ ^
+t5554.scala:31: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int)
+ def test11[T] = Foo2[T](1)
+ ^
+t5554.scala:36: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int)
+ def test12[T]: String = Foo2[T](1)
+ ^
+t5554.scala:37: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int) and expected result type Base[T]
+ def test13[T]: Base[T] = Foo2[T](1)
+ ^
+t5554.scala:38: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int)
+ def test14[T] = Foo2[T](1)
+ ^
+11 errors found
diff --git a/test/files/neg/t5554.scala b/test/files/neg/t5554.scala
new file mode 100644
index 0000000000..d279abea7f
--- /dev/null
+++ b/test/files/neg/t5554.scala
@@ -0,0 +1,39 @@
+class Base[T]
+
+object Foo1 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](x: Int)(implicit z: String): String = z
+}
+
+object Foo2 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](x: Int)(implicit z: String): Base[T] = new Base[T]
+}
+
+object Test1 {
+ def test1[T]: Int = Foo1[T](1)
+ def test2[T]: Base[T] = Foo1[T](1)
+ def test3[T]: String = Foo1[T](1)
+ def test4[T] = Foo1[T](1)
+}
+
+object Test2 {
+ implicit val v: String = "foo"
+ def test5[T]: Int = Foo1[T](1)
+ def test6[T]: Base[T] = Foo1[T](1)
+ def test7[T]: String = Foo1[T](1)
+ def test8[T] = Foo1[T](1)
+}
+
+object Test3 {
+ def test9[T]: String = Foo2[T](1)
+ def test10[T]: Base[T] = Foo2[T](1)
+ def test11[T] = Foo2[T](1)
+}
+
+object Test4 {
+ implicit val v: String = "foo"
+ def test12[T]: String = Foo2[T](1)
+ def test13[T]: Base[T] = Foo2[T](1)
+ def test14[T] = Foo2[T](1)
+}
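[note] t5553 and t5554 pin down how overload resolution reports alternatives that differ only in a trailing implicit section or a default argument: rather than picking one arbitrarily, the compiler now reports the ambiguity. API authors can sidestep the whole class of errors by not overloading on implicit availability in the first place, e.g.:

```scala
class Base[T]

object Foo {
  def apply[T](x: Int): Base[T] = new Base[T]
  // A distinct name keeps the implicit-taking variant out of the overload set.
  def applyWith[T](x: Int)(implicit z: String): Base[T] = new Base[T]
}

object Use extends App {
  implicit val v: String = "cfg"
  val a: Base[Int] = Foo[Int](1)
  val b: Base[Int] = Foo.applyWith[Int](1)
  println((a, b))
}
```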
diff --git a/test/files/neg/t591.check b/test/files/neg/t591.check
index 434c2dd002..5cdeebf079 100644
--- a/test/files/neg/t591.check
+++ b/test/files/neg/t591.check
@@ -1,4 +1,4 @@
-t591.scala:38: error: method input_= is defined twice
+t591.scala:38: error: method input_= is defined twice in t591.scala
def input_=(in : Input) = {}
^
one error found
diff --git a/test/files/neg/t800.check b/test/files/neg/t800.check
index 4bfba5420b..44c316a95b 100644
--- a/test/files/neg/t800.check
+++ b/test/files/neg/t800.check
@@ -1,13 +1,13 @@
t800.scala:4: error: qualification is already defined as value qualification
val qualification = false;
^
-t800.scala:8: error: method qualification is defined twice
+t800.scala:8: error: method qualification is defined twice in t800.scala
val qualification = false;
^
-t800.scala:12: error: value qualification is defined twice
+t800.scala:12: error: value qualification is defined twice in t800.scala
var qualification = false;
^
-t800.scala:16: error: method qualification is defined twice
+t800.scala:16: error: method qualification is defined twice in t800.scala
var qualification = false;
^
four errors found
diff --git a/test/files/neg/t935.check b/test/files/neg/t935.check
index af634a2630..8b73700187 100644
--- a/test/files/neg/t935.check
+++ b/test/files/neg/t935.check
@@ -4,4 +4,7 @@ t935.scala:7: error: type arguments [Test3.B] do not conform to class E's type p
t935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String]
val b: String @E[B](new B) = "hi"
^
-two errors found
+t935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String]
+ val b: String @E[B](new B) = "hi"
+ ^
+three errors found
diff --git a/test/files/neg/t960.check b/test/files/neg/t960.check
index d65b0cea93..603b1cb032 100644
--- a/test/files/neg/t960.check
+++ b/test/files/neg/t960.check
@@ -4,7 +4,10 @@ and method unapply in object List of type [a](xs: List[a])Option[(a, List[a])]
match argument types (List[a])
case List(x, xs) => 7
^
-t960.scala:12: error: method unapply is defined twice
+t960.scala:22: error: cannot resolve overloaded unapply
+ case List(x, xs) => 7
+ ^
+t960.scala:12: error: method unapply is defined twice in t960.scala
def unapply[a](xs: List[a]): Option[Null] = xs match {
^
-two errors found
+three errors found
diff --git a/test/files/neg/t960.scala b/test/files/neg/t960.scala
index 5101cf8433..36909626c1 100644
--- a/test/files/neg/t960.scala
+++ b/test/files/neg/t960.scala
@@ -17,4 +17,8 @@ object List {
def foo[a](xs: List[a]) = xs match {
case List(x, xs) => 7
}
+
+ def bar(xs: Any) = xs match { // test error message OverloadedUnapplyError
+ case List(x, xs) => 7
+ }
}
diff --git a/test/files/neg/tailrec-2.check b/test/files/neg/tailrec-2.check
index 4f763a1c8d..a918858773 100644
--- a/test/files/neg/tailrec-2.check
+++ b/test/files/neg/tailrec-2.check
@@ -1,4 +1,7 @@
-tailrec-2.scala:6: error: could not optimize @tailrec annotated method f: it contains a recursive call targetting a supertype
+tailrec-2.scala:8: error: could not optimize @tailrec annotated method f: it contains a recursive call targetting a supertype
@annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
^
-one error found
+tailrec-2.scala:9: error: @tailrec annotated method contains no recursive calls
+ @annotation.tailrec final def f1[B >: A](mem: List[B]): List[B] = this.g(mem)
+ ^
+two errors found
diff --git a/test/files/neg/tailrec-2.scala b/test/files/neg/tailrec-2.scala
index 342cd85323..9eb3af2f07 100644
--- a/test/files/neg/tailrec-2.scala
+++ b/test/files/neg/tailrec-2.scala
@@ -1,9 +1,12 @@
sealed abstract class Super[+A] {
def f[B >: A](mem: List[B]) : List[B]
+ def g(mem: List[_]) = ???
}
// This one should fail, target is a supertype
class Bop1[+A](val element: A) extends Super[A] {
+
@annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
+ @annotation.tailrec final def f1[B >: A](mem: List[B]): List[B] = this.g(mem)
}
// These succeed
class Bop2[+A](val element: A) extends Super[A] {
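[note] Both new tailrec diagnostics in this patch guard the same invariant: `@tailrec` only certifies self-recursive calls in tail position on the receiver itself, so a call dispatched through a supertype reference (or no recursion at all, or a lazy val) cannot be rewritten into a loop. A form the optimizer does accept:

```scala
import scala.annotation.tailrec

object TailrecDemo extends App {
  // Self-recursion in tail position: transformable into a while loop.
  @tailrec def length(xs: List[_], acc: Int = 0): Int = xs match {
    case Nil     => acc
    case _ :: tl => length(tl, acc + 1)
  }
  println(length(List(1, 2, 3))) // 3
}
```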
diff --git a/test/files/pos/Transactions.scala b/test/files/pos/Transactions.scala
index 9b4388300b..525eff7514 100644
--- a/test/files/pos/Transactions.scala
+++ b/test/files/pos/Transactions.scala
@@ -1,4 +1,4 @@
-package scala.concurrent
+package scala.concurrent1
class AbortException extends RuntimeException
diff --git a/test/pending/pos/existentials-harmful.scala b/test/files/pos/existentials-harmful.scala
index 8722852e8a..8722852e8a 100644
--- a/test/pending/pos/existentials-harmful.scala
+++ b/test/files/pos/existentials-harmful.scala
diff --git a/test/files/pos/existentials.scala b/test/files/pos/existentials.scala
new file mode 100644
index 0000000000..0adbc701a6
--- /dev/null
+++ b/test/files/pos/existentials.scala
@@ -0,0 +1,22 @@
+/** All of these should work, some don't yet.
+ * !!!
+ */
+class A {
+ def f() = { case class Bob(); Bob }
+
+ val quux0 = f()
+ def quux1 = f()
+ // lazy val quux2 = f()
+ // def quux3 = {
+ // lazy val quux3a = f()
+ // quux3a
+ // }
+
+ val bippy0 = f _
+ def bippy1 = f _
+ // lazy val bippy2 = f _
+ // val bippy3 = {
+ // lazy val bippy3a = f _
+ // bippy3a
+ // }
+}
diff --git a/test/pending/pos/local-objects.scala b/test/files/pos/local-objects.scala
index ed7c50ead9..ed7c50ead9 100644
--- a/test/pending/pos/local-objects.scala
+++ b/test/files/pos/local-objects.scala
diff --git a/test/pending/pos/package-case.scala b/test/files/pos/package-case.scala
index 906f1eb3f2..906f1eb3f2 100644
--- a/test/pending/pos/package-case.scala
+++ b/test/files/pos/package-case.scala
diff --git a/test/files/pos/spec-Function1.scala b/test/files/pos/spec-Function1.scala
index 7bdcd072b2..5b6af67a74 100644
--- a/test/files/pos/spec-Function1.scala
+++ b/test/files/pos/spec-Function1.scala
@@ -8,7 +8,7 @@
// generated by genprod on Wed Apr 23 10:06:16 CEST 2008 (with fancy comment) (with extra methods)
-package scala
+package scalabip
/** <p>
diff --git a/test/files/pos/spec-groups.scala b/test/files/pos/spec-groups.scala
new file mode 100644
index 0000000000..9b6359a982
--- /dev/null
+++ b/test/files/pos/spec-groups.scala
@@ -0,0 +1,65 @@
+import Specializable._
+
+class A[@specialized(Primitives) T](x: T) {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class B[@specialized(Everything) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class C[@specialized(Bits32AndUp) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class D[@specialized(Integral) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class E[@specialized(AllNumeric) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class F[@specialized(BestOfBreed) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class G[@specialized(Byte, Double, AnyRef) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
diff --git a/test/files/pos/specialize10.scala b/test/files/pos/specialize10.scala
new file mode 100644
index 0000000000..bbe197cda2
--- /dev/null
+++ b/test/files/pos/specialize10.scala
@@ -0,0 +1,7 @@
+trait Bippy[@specialized(
+ scala.Char, scala.Boolean, scala.Byte,
+ scala.Short, scala.Int, scala.Long,
+ scala.Float, scala.Double, scala.Unit,
+ scala.AnyRef) T] { }
+
+trait Bippy2[@specialized(Char, Boolean, Byte, Short, Int, Long, Float, Double, Unit, AnyRef) T] { }
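
spec-groups.scala crosses every Specializable group over both the class and the method type parameter, and specialize10.scala checks that spelling the types out by hand still works. Roughly, each group name is shorthand for a fixed set of value types, as in this sketch (assuming the scala.Specializable groups shown above):

    import Specializable._

    class Cell[@specialized(Integral) T](val x: T)    // variants for Byte, Short, Int, Long, Char
    class Cell2[@specialized(Primitives) T](val x: T) // the primitive value types plus Unit

    object Demo extends App {
      println(new Cell(42).x)  // resolves to the Int specialization, no boxing
    }
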
diff --git a/test/files/pos/spurious-overload.scala b/test/files/pos/spurious-overload.scala
new file mode 100644
index 0000000000..9767a44eee
--- /dev/null
+++ b/test/files/pos/spurious-overload.scala
@@ -0,0 +1,32 @@
+object Test extends App {
+ def foo(bar: Any) = bar
+
+ val code = foo{
+ object lazyLib {
+
+ def delay[A](value: => A): Susp[A] = new SuspImpl[A](value)
+
+ implicit def force[A](s: Susp[A]): A = s()
+
+ abstract class Susp[+A] extends Function0[A]
+
+ class SuspImpl[A](lazyValue: => A) extends Susp[A] {
+ private var maybeValue: Option[A] = None
+
+ override def apply() = maybeValue match {
+ case None =>
+ val value = lazyValue
+ maybeValue = Some(value)
+ value
+ case Some(value) =>
+ value
+ }
+ }
+ }
+
+ import lazyLib._
+
+ val s: Susp[Int] = delay { println("evaluating..."); 3 }
+ println("2 + s = " + (2 + s)) // implicit call to force()
+ }
+}
\ No newline at end of file
diff --git a/test/pending/pos/t1957.scala b/test/files/pos/t1957.scala
index f80cf730ed..f80cf730ed 100644
--- a/test/pending/pos/t1957.scala
+++ b/test/files/pos/t1957.scala
diff --git a/test/pending/pos/t1987/a.scala b/test/files/pos/t1987b/a.scala
index ff27044b69..ff27044b69 100644
--- a/test/pending/pos/t1987/a.scala
+++ b/test/files/pos/t1987b/a.scala
diff --git a/test/pending/pos/t1987/b.scala b/test/files/pos/t1987b/b.scala
index a469ca6ea8..a469ca6ea8 100644
--- a/test/pending/pos/t1987/b.scala
+++ b/test/files/pos/t1987b/b.scala
diff --git a/test/pending/pos/t2194.scala b/test/files/pos/t2194.scala
index e87be509d1..e87be509d1 100644
--- a/test/pending/pos/t2194.scala
+++ b/test/files/pos/t2194.scala
diff --git a/test/files/pos/t531.scala b/test/files/pos/t531.scala
index 02763e08f1..856926de4f 100644
--- a/test/files/pos/t531.scala
+++ b/test/files/pos/t531.scala
@@ -2,9 +2,9 @@ object Test extends App {
import scala.reflect._;
def titi = {
var truc = 0
- val tata: Code[()=>Unit] = () => {
+ val tata = Code.lift{() => {
truc = 6
- }
+ }}
()
}
}
diff --git a/test/files/pos/t532.scala b/test/files/pos/t532.scala
index 32649b1629..f864bbf45e 100644
--- a/test/files/pos/t532.scala
+++ b/test/files/pos/t532.scala
@@ -2,9 +2,9 @@ object Test extends App {
import scala.reflect._;
def titi: Unit = {
var truc = 0
- val tata: Code[()=>Unit] = () => {
+ val tata = Code.lift{() => {
truc = truc + 6
- }
+ }}
()
}
}
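
Both fixtures switch from implicitly coercing a closure to Code[() => Unit] to the explicit Code.lift entry point, which reifies the closure as a typed tree. The shape the tests now exercise, as a sketch only:

    import scala.reflect._

    object LiftDemo {
      def captured = {
        var truc = 0
        Code.lift { () => truc += 1 }  // yields a Code[() => Unit] wrapping the tree
      }
    }
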
diff --git a/test/files/pos/t5406.scala b/test/files/pos/t5406.scala
new file mode 100644
index 0000000000..c2e42c0ac3
--- /dev/null
+++ b/test/files/pos/t5406.scala
@@ -0,0 +1,4 @@
+object Wuffles { }
+object Test {
+ def f = (Some(Wuffles): Option[Wuffles.type]) match { case Some(Wuffles) => println("Woof"); case _ => println("Meow") }
+}
diff --git a/test/files/pos/t5444.scala b/test/files/pos/t5444.scala
new file mode 100644
index 0000000000..df6b2ce4f8
--- /dev/null
+++ b/test/files/pos/t5444.scala
@@ -0,0 +1,42 @@
+// /scala/trac/5444/a.scala
+// Mon Feb 13 21:01:45 PST 2012
+
+// Identically named local traits are required to reproduce the bug.
+class Test {
+ def a() = {
+ trait T {
+ def x() = 1
+ }
+ trait U {
+ def x1() = 2
+ }
+ class Bippy extends T with U { def z() = x() + x1() }
+ new Bippy
+ }
+ def b() {
+ trait T {
+ def y() = 3
+ trait T2 {
+ def yy() = 10
+ }
+ }
+ trait U {
+ def y1() = 4
+ trait T3 {
+ def yy() = 11
+ }
+ }
+ class Bippy extends T with U { def z() = y() + y1() + (1 to (new T2 { }).yy()).map(_ + 1).sum }
+ (new Bippy).z()
+ }
+ def c() {
+ trait T {
+ def z() = 5
+ }
+ trait U {
+ def z1() = 6
+ }
+ (new Test with T with U).z1()
+ }
+}
+
diff --git a/test/files/pos/t5541.scala b/test/files/pos/t5541.scala
new file mode 100644
index 0000000000..39682a2fff
--- /dev/null
+++ b/test/files/pos/t5541.scala
@@ -0,0 +1,61 @@
+package philips.adolf.paul
+
+trait Sys[ S <: Sys[ S ]] {
+ type Tx
+}
+
+object HASkipList {
+ sealed trait NodeLike[ S <: Sys[ S ], @specialized( Int ) A ] {
+ def size : Int
+ def key( i: Int ): A
+ }
+ sealed trait Node[ S <: Sys[ S ], @specialized( Int ) A ] extends NodeLike[ S, A ] {
+ def isLeaf : Boolean
+ def isBranch : Boolean
+ def asBranch : Branch[ S, A ]
+ }
+ sealed trait BranchLike[ S <: Sys[ S ], @specialized( Int ) A ] extends NodeLike[ S, A ] {
+ def down( i: Int )( implicit tx: S#Tx ) : Node[ S, A ] = sys.error("")
+ }
+ sealed trait HeadOrBranch[ S <: Sys[ S ], A ]
+ final class Branch[ S <: Sys[ S ], @specialized( Int ) A ]()
+ extends BranchLike[ S, A ] with HeadOrBranch[ S, A ] with Node[ S, A ] {
+ def size:Int=1234
+ def key(i: Int):A=sys.error("TODO")
+ def isLeaf : Boolean = false
+ def isBranch : Boolean = true
+ def asBranch : Branch[ S, A ] = this
+ }
+}
+sealed trait HASkipList[ S <: Sys[ S ], @specialized( Int ) A ]
+
+class HASkipListView[ S <: Sys[ S ], A ]( private val l: HASkipList[ S, A ])( implicit system: S ) {
+ import HASkipList.Node
+ private def buildBoxMap( n: Node[ S, A ], isRight: Boolean )( implicit tx: S#Tx ) : (Box, NodeBox) = {
+ val sz = n.size
+ val szm = sz - 1
+ val keys = IndexedSeq.tabulate( sz ) { i =>
+ val key = n.key( i )
+ (key, if( isRight && i == szm ) "M" else key.toString)
+ }
+ val chbo = if( n.isLeaf ) None else {
+ val nb = n.asBranch
+ Some( IndexedSeq.tabulate( sz )( i => buildBoxMap( nb.down( i ), isRight && (i == szm) )))
+ }
+ val b = NodeBox( n, keys, chbo.map( _.map( _._2 )))
+ val bb = chbo match {
+ case Some( chbt ) =>
+ val chb = chbt.map( _._1 )
+ val h = Horiz( bs = chb )
+ Vert( bs = IndexedSeq[Box]( b, h ))
+ case None => b
+ }
+
+ (bb, b)
+ }
+
+ private trait Box
+ private case class Horiz( spacing: Int = 20, bs: IndexedSeq[ Box ]) extends Box
+ private final case class Vert( spacing: Int = 20, bs: IndexedSeq[ Box ]) extends Box
+ private final case class NodeBox( n: Node[ S, A ], keys: IndexedSeq[ (A, String) ], downs: Option[ IndexedSeq[ NodeBox ]]) extends Box
+}
diff --git a/test/files/pos/t5546.scala b/test/files/pos/t5546.scala
new file mode 100644
index 0000000000..4b0b0589b6
--- /dev/null
+++ b/test/files/pos/t5546.scala
@@ -0,0 +1 @@
+class A { def foo: Class[_ <: A] = getClass }
\ No newline at end of file
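
t5546 pins down the refined static result of getClass: on a receiver of static type A it is Class[_ <: A], so the ascription above needs no cast. The same guarantee at a call site, sketched:

    class Animal

    object GetClassDemo extends App {
      val c: Class[_ <: Animal] = (new Animal).getClass  // precise existential bound
      println(c.getName)
    }
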
diff --git a/test/files/presentation/shutdown-deadlock.check b/test/files/presentation/shutdown-deadlock.check
new file mode 100644
index 0000000000..ddcb4ff59b
--- /dev/null
+++ b/test/files/presentation/shutdown-deadlock.check
@@ -0,0 +1,3 @@
+reload: arrays.scala
+reload: arrays.scala
+No timeouts
diff --git a/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala b/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala
new file mode 100644
index 0000000000..53af84541a
--- /dev/null
+++ b/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala
@@ -0,0 +1,45 @@
+import scala.tools.nsc.interactive._
+import tests._
+
+object Test extends InteractiveTest {
+ val Reps = 30
+ import compiler._
+
+ def askSomething(): Response[Tree] = {
+ // println("*")
+ Thread.sleep(50)
+ ask { compiler.askStructure(true)(sourceFiles.head, _) }
+ }
+
+ def fireAsks() {
+ val jobs1 = for (i <- 1 until Reps) yield {
+ if (i % 10 == 0) {
+ askReload(sourceFiles)
+ }
+ askSomething
+ }
+
+ for ((j, i) <- jobs1.zipWithIndex) {
+ j.get(5000) match {
+ case None =>
+ println(i + ": TIMEOUT")
+ exit(1) // no need to delay the test any longer
+ case r =>
+ }
+ }
+ compiler.askShutdown()
+
+ println("No timeouts")
+ }
+
+ override def main(args: Array[String]) {
+ new Thread("Asking") {
+ override def run() {
+ fireAsks()
+ }
+ }.start()
+
+ Thread.sleep(800)
+ compiler.askShutdown()
+ }
+}
\ No newline at end of file
diff --git a/test/files/presentation/shutdown-deadlock/src/arrays.scala b/test/files/presentation/shutdown-deadlock/src/arrays.scala
new file mode 100644
index 0000000000..ecebc78a6f
--- /dev/null
+++ b/test/files/presentation/shutdown-deadlock/src/arrays.scala
@@ -0,0 +1,937 @@
+//############################################################################
+// Arrays
+//############################################################################
+
+//############################################################################
+
+object Test {
+
+ //##########################################################################
+ // Types
+
+ type Strings = List[String]
+ type Map = scala.collection.Map[Int, Any]
+ type HashMap = scala.collection.mutable.HashMap[Int, Any]
+ type TreeMap = scala.collection.immutable.TreeMap[Int, Any]
+
+ //##########################################################################
+ // Identity Functions
+
+ def id_Ta_T[T <: Any ](x: T): T = x;
+ def id_Tr_T[T <: AnyRef ](x: T): T = x;
+ def id_To_T[T <: Object ](x: T): T = x;
+
+ def id_Ta_a[T <: Any ](x: T): Any = x;
+ def id_Tr_a[T <: AnyRef ](x: T): Any = x;
+ def id_To_a[T <: Object ](x: T): Any = x;
+
+ def id_Tr_r[T <: AnyRef ](x: T): AnyRef = x;
+ def id_To_r[T <: Object ](x: T): AnyRef = x;
+
+ def id_To_o[T <: Object ](x: T): Object = x;
+
+ def id_TSa_T [S <: Any , T <: Array[S]](x: T): T = x;
+ def id_TSv_T [S <: AnyVal , T <: Array[S]](x: T): T = x;
+ def id_TSr_T [S <: AnyRef , T <: Array[S]](x: T): T = x;
+ def id_TSo_T [S <: Object , T <: Array[S]](x: T): T = x;
+ def id_TSm_T [S <: Map , T <: Array[S]](x: T): T = x;
+ def id_TSn_T [S <: Strings, T <: Array[S]](x: T): T = x;
+
+ def id_TSa_Ss[S <: Any , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSv_Ss[S <: AnyVal , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSr_Ss[S <: AnyRef , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSo_Ss[S <: Object , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSm_Ss[S <: Map , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSn_Ss[S <: Strings, T <: Array[S]](x: T): Array[S] = x;
+
+ def id_TSa_a [S <: Any , T <: Array[S]](x: T): Any = x;
+ def id_TSv_a [S <: AnyVal , T <: Array[S]](x: T): Any = x;
+ def id_TSr_a [S <: AnyRef , T <: Array[S]](x: T): Any = x;
+ def id_TSo_a [S <: Object , T <: Array[S]](x: T): Any = x;
+ def id_TSm_a [S <: Map , T <: Array[S]](x: T): Any = x;
+ def id_TSn_a [S <: Strings, T <: Array[S]](x: T): Any = x;
+
+ def id_TSa_r [S <: Any , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSv_r [S <: AnyVal , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSr_r [S <: AnyRef , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSo_r [S <: Object , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSm_r [S <: Map , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSn_r [S <: Strings, T <: Array[S]](x: T): AnyRef = x;
+
+ def id_TSa_o [S <: Any , T <: Array[S]](x: T): Object = x;
+ def id_TSv_o [S <: AnyVal , T <: Array[S]](x: T): Object = x;
+ def id_TSr_o [S <: AnyRef , T <: Array[S]](x: T): Object = x;
+ def id_TSo_o [S <: Object , T <: Array[S]](x: T): Object = x;
+ def id_TSm_o [S <: Map , T <: Array[S]](x: T): Object = x;
+ def id_TSn_o [S <: Strings, T <: Array[S]](x: T): Object = x;
+
+ def id_Sas_Ss[S <: Any ](xs: Array[S]): Array[S] = xs;
+ def id_Svs_Ss[S <: AnyVal ](xs: Array[S]): Array[S] = xs;
+ def id_Srs_Ss[S <: AnyRef ](xs: Array[S]): Array[S] = xs;
+ def id_Sos_Ss[S <: Object ](xs: Array[S]): Array[S] = xs;
+ def id_Sms_Ss[S <: Map ](xs: Array[S]): Array[S] = xs;
+ def id_Sns_Ss[S <: Strings](xs: Array[S]): Array[S] = xs;
+
+ def id_Sas_a [S <: Any ](xs: Array[S]): Any = xs;
+ def id_Svs_a [S <: AnyVal ](xs: Array[S]): Any = xs;
+ def id_Srs_a [S <: AnyRef ](xs: Array[S]): Any = xs;
+ def id_Sos_a [S <: Object ](xs: Array[S]): Any = xs;
+ def id_Sms_a [S <: Map ](xs: Array[S]): Any = xs;
+ def id_Sns_a [S <: Strings](xs: Array[S]): Any = xs;
+
+ def id_Sas_r [S <: Any ](xs: Array[S]): AnyRef = xs;
+ def id_Svs_r [S <: AnyVal ](xs: Array[S]): AnyRef = xs;
+ def id_Srs_r [S <: AnyRef ](xs: Array[S]): AnyRef = xs;
+ def id_Sos_r [S <: Object ](xs: Array[S]): AnyRef = xs;
+ def id_Sms_r [S <: Map ](xs: Array[S]): AnyRef = xs;
+ def id_Sns_r [S <: Strings](xs: Array[S]): AnyRef = xs;
+
+ def id_Sas_o [S <: Any ](xs: Array[S]): Object = xs;
+ def id_Svs_o [S <: AnyVal ](xs: Array[S]): Object = xs;
+ def id_Srs_o [S <: AnyRef ](xs: Array[S]): Object = xs;
+ def id_Sos_o [S <: Object ](xs: Array[S]): Object = xs;
+ def id_Sms_o [S <: Map ](xs: Array[S]): Object = xs;
+ def id_Sns_o [S <: Strings](xs: Array[S]): Object = xs;
+
+ //##########################################################################
+ // Generic Checks
+
+ type Check[T] = Array[T] => Unit;
+
+ var checks: Int = 0;
+
+ def check(test0: Boolean, actual: Any, expected: Any) {
+ val test1: Boolean = actual == expected;
+ if (!test0 || !test1) {
+ val s0 = if (test0) "ok" else "KO";
+ val s1 = if (test1) "ok" else "KO";
+ val s2 = actual.toString();
+ val s3 = expected.toString();
+ error(s0 + " - " + s1 + ": " + s2 + " != " + s3);
+ }
+ checks += 1
+ }
+
+ def check_Ta[T <: Any ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l);
+ check(xs(0) == x0, xs(0), x0);
+ c(xs);
+ }
+
+ def check_Tv[T <: AnyVal ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l);
+ check(xs(0) == x0, xs(0), x0);
+ check_Ta(xs, l, x0, c);
+ c(xs);
+ }
+
+ def check_Tr[T <: AnyRef ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l);
+ check(xs(0) == x0, xs(0), x0);
+ check_Ta(xs, l, x0, c);
+ c(xs);
+ }
+
+ def check_To[T <: Object ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l);
+ check(xs(0) == x0, xs(0), x0);
+ check_Ta(xs, l, x0, c);
+ check_Tr(xs, l, x0, c);
+ c(xs);
+ }
+
+ def check_Tm[T <: Map ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l)
+ check(xs(0) == x0, xs(0), x0)
+ check_Ta(xs, l, x0, c)
+ check_Tr(xs, l, x0, c)
+ check_To(xs, l, x0, c)
+ c(xs)
+ }
+
+ def check_Tn[T <: Strings](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l)
+ check(xs(0) == x0, xs(0), x0)
+ check_Ta(xs, l, x0, c)
+ check_Tr(xs, l, x0, c)
+ check_To(xs, l, x0, c)
+ c(xs)
+ }
+
+ def checkT2368() {
+ val arr = Array(1, 2, 3)
+ arr(0) += 1
+ assert(arr(0) == 2)
+ }
+
+ //##########################################################################
+ // Values
+
+ val u0: Unit = ();
+ val u1: Unit = ();
+
+ val z0: Boolean = false;
+ val z1: Boolean = true;
+
+ val b0: Byte = Byte.MinValue;
+ val b1: Byte = 1;
+ val b2: Byte = Byte.MaxValue;
+
+ val s0: Short = Short.MinValue;
+ val s1: Short = 2;
+ val s2: Short = Short.MaxValue;
+
+ val c0: Char = Char.MinValue;
+ val c1: Char = '3';
+ val c2: Char = Char.MaxValue;
+
+ val i0: Int = Int.MinValue;
+ val i1: Int = 4;
+  val i2: Int = Int.MaxValue;
+
+ val l0: Long = Long.MinValue;
+ val l1: Int = 5;
+ val l2: Long = Long.MaxValue;
+
+ val f0: Float = Float.MinValue;
+ val f1: Int = 6;
+ val f2: Float = Float.MaxValue;
+
+ val d0: Double = Double.MinValue;
+ val d1: Int = 7;
+ val d2: Double = Double.MaxValue;
+
+ val a0: Unit = ();
+ val a1: Boolean = false;
+ val a2: Int = 0;
+ val a3: Null = null;
+ val a4: String = "a-z";
+ val a5: Symbol = 'token;
+ val a6: HashMap = new HashMap();
+ val a7: TreeMap = scala.collection.immutable.TreeMap.empty[Int, Any];
+ val a8: Strings = List("a", "z");
+
+ val v0: Unit = ();
+ val v1: Boolean = false;
+ val v2: Int = 0;
+ val v3: Long = l2;
+ val v4: Float = f2;
+ val v5: Double = d2;
+
+ val r0: Null = a3;
+ val r1: String = a4;
+ val r2: Symbol = a5;
+ val r3: HashMap = a6;
+ val r4: TreeMap = a7;
+ val r5: Strings = a8;
+
+ val o0: Null = r0;
+ val o1: String = r1;
+ val o2: Symbol = r2;
+ val o3: HashMap = r3;
+ val o4: TreeMap = r4;
+ val o5: Strings = r5;
+
+ val m0: Null = r0;
+ val m1: HashMap = r3;
+ val m2: TreeMap = r4;
+
+ val n0: Null = r0;
+ val n1: Strings = r5;
+ val n2: Nil.type= Nil;
+
+ //##########################################################################
+ // Specific Checks
+
+ def ucheck(xs: Array[Unit ]): Unit = {
+ check(xs.length == 2, xs.length, 2);
+ check(xs(0) == u0, xs(0), u0);
+ check(xs(1) == u1, xs(1), u1);
+ }
+
+ def zcheck(xs: Array[Boolean]): Unit = {
+ check(xs.length == 2, xs.length, 2);
+ check(xs(0) == z0, xs(0), z0);
+ check(xs(1) == z1, xs(1), z1);
+ }
+
+ def bcheck(xs: Array[Byte ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == b0, xs(0), b0);
+ check(xs(1) == b1, xs(1), b1);
+ check(xs(2) == b2, xs(2), b2);
+ }
+
+ def scheck(xs: Array[Short ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == s0, xs(0), s0);
+ check(xs(1) == s1, xs(1), s1);
+ check(xs(2) == s2, xs(2), s2);
+ }
+
+ def ccheck(xs: Array[Char ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == c0, xs(0), c0);
+ check(xs(1) == c1, xs(1), c1);
+ check(xs(2) == c2, xs(2), c2);
+ }
+
+ def icheck(xs: Array[Int ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == i0, xs(0), i0);
+ check(xs(1) == i1, xs(1), i1);
+ check(xs(2) == i2, xs(2), i2);
+ }
+
+ def lcheck(xs: Array[Long ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == l0, xs(0), l0);
+ check(xs(1) == l1, xs(1), l1: Long); // !!! : Long
+ check(xs(2) == l2, xs(2), l2);
+ }
+
+ def fcheck(xs: Array[Float ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == f0, xs(0), f0);
+ check(xs(1) == f1, xs(1), f1: Float); // !!! : Float
+ check(xs(2) == f2, xs(2), f2);
+ }
+
+ def dcheck(xs: Array[Double ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == d0, xs(0), d0);
+ check(xs(1) == d1, xs(1), d1: Double); // !!! : Double
+ check(xs(2) == d2, xs(2), d2);
+ }
+
+ def rcheck(xs: Array[AnyRef ]): Unit = {
+ check(xs.length == 6, xs.length, 6);
+ check(xs(0) == r0, xs(0), r0);
+ check(xs(1) == r1, xs(1), r1);
+ check(xs(2) == r2, xs(2), r2);
+ check(xs(3) == r3, xs(3), r3);
+ check(xs(4) == r4, xs(4), r4);
+ check(xs(5) == r5, xs(5), r5);
+ }
+
+ def ocheck(xs: Array[Object ]): Unit = {
+ check(xs.length == 6, xs.length, 6);
+ check(xs(0) == o0, xs(0), o0);
+ check(xs(1) == o1, xs(1), o1);
+ check(xs(2) == o2, xs(2), o2);
+ check(xs(3) == o3, xs(3), o3);
+ check(xs(4) == o4, xs(4), o4);
+ check(xs(5) == o5, xs(5), o5);
+ }
+
+ def mcheck(xs: Array[Map ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == m0, xs(0), m0);
+ check(xs(1) == m1, xs(1), m1);
+ check(xs(2) == m2, xs(2), m2);
+ }
+
+ def ncheck(xs: Array[Strings]) {
+ check(xs.length == 3, xs.length, 3)
+ check(xs(0) == n0, xs(0), n0)
+ check(xs(1) == n1, xs(1), n1)
+ check(xs(2) == n2, xs(2), n2)
+ }
+
+ //##########################################################################
+ // Miscellaneous checks
+
+ def checkZip {
+ val zipped = Array("a", "b", "c").zip(Array(1, 2))
+ val expected = Array(("a",1), ("b",2))
+ check(zipped sameElements expected, zipped.toList, expected.toList)
+ }
+
+ def checkConcat { // ticket #713
+ val x1 = Array.concat(Array(1, 2), Array(3, 4))
+ val y1 = Array(1, 2, 3, 4)
+ check(x1 sameElements y1, x1.toList, y1.toList)
+ }
+
+ //##########################################################################
+ // Arrays
+
+ val uarray: Array[Unit ] = Array(u0, u1);
+ val zarray: Array[Boolean] = Array(z0, z1);
+ val barray: Array[Byte ] = Array(b0, b1, b2);
+ val sarray: Array[Short ] = Array(s0, s1, s2);
+ val carray: Array[Char ] = Array(c0, c1, c2);
+ val iarray: Array[Int ] = Array(i0, i1, i2);
+ val larray: Array[Long ] = Array(l0, l1, l2);
+ val farray: Array[Float ] = Array(f0, f1, f2);
+ val darray: Array[Double ] = Array(d0, d1, d2);
+  val rarray: Array[AnyRef ] = Array(r0, r1, r2, r3, r4, r5);
+  val oarray: Array[Object ] = Array(o0, o1, o2, o3, o4, o5);
+ val marray: Array[Map ] = Array(m0, m1, m2);
+ val narray: Array[Strings] = Array(n0, n1, n2);
+
+ //##########################################################################
+ // Main
+
+ def main(args: Array[String]): Unit = {
+
+ //######################################################################
+
+ ucheck(uarray);
+ zcheck(zarray);
+ bcheck(barray);
+ scheck(sarray);
+ ccheck(carray);
+ icheck(iarray);
+ lcheck(larray);
+ fcheck(farray);
+ dcheck(darray);
+ rcheck(rarray);
+ ocheck(oarray);
+ mcheck(marray);
+ ncheck(narray);
+
+ //######################################################################
+
+ ucheck(id_Ta_T(uarray));
+ zcheck(id_Ta_T(zarray));
+ bcheck(id_Ta_T(barray));
+ scheck(id_Ta_T(sarray));
+ ccheck(id_Ta_T(carray));
+ icheck(id_Ta_T(iarray));
+ lcheck(id_Ta_T(larray));
+ fcheck(id_Ta_T(farray));
+ dcheck(id_Ta_T(darray));
+ rcheck(id_Ta_T(rarray));
+ ocheck(id_Ta_T(oarray));
+ mcheck(id_Ta_T(marray));
+ ncheck(id_Ta_T(narray));
+
+ ucheck(id_Tr_T(uarray));
+ zcheck(id_Tr_T(zarray));
+ bcheck(id_Tr_T(barray));
+ scheck(id_Tr_T(sarray));
+ ccheck(id_Tr_T(carray));
+ icheck(id_Tr_T(iarray));
+ lcheck(id_Tr_T(larray));
+ fcheck(id_Tr_T(farray));
+ dcheck(id_Tr_T(darray));
+ rcheck(id_Tr_T(rarray));
+ ocheck(id_Tr_T(oarray));
+ mcheck(id_Tr_T(marray));
+ ncheck(id_Tr_T(narray));
+
+ ucheck(id_To_T(uarray));
+ zcheck(id_To_T(zarray));
+ bcheck(id_To_T(barray));
+ scheck(id_To_T(sarray));
+ ccheck(id_To_T(carray));
+ icheck(id_To_T(iarray));
+ lcheck(id_To_T(larray));
+ fcheck(id_To_T(farray));
+ dcheck(id_To_T(darray));
+ rcheck(id_To_T(rarray));
+ ocheck(id_To_T(oarray));
+ mcheck(id_To_T(marray));
+ ncheck(id_To_T(narray));
+
+ ucheck(id_Ta_a(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_Ta_a(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_Ta_a(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_Ta_a(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_Ta_a(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_Ta_a(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_Ta_a(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_Ta_a(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_Ta_a(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_Ta_a(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_Ta_a(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_Ta_a(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_Ta_a(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_Tr_a(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_Tr_a(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_Tr_a(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_Tr_a(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_Tr_a(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_Tr_a(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_Tr_a(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_Tr_a(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_Tr_a(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_Tr_a(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_Tr_a(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_Tr_a(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_Tr_a(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_To_a(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_To_a(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_To_a(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_To_a(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_To_a(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_To_a(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_To_a(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_To_a(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_To_a(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_To_a(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_To_a(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_To_a(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_To_a(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_Tr_r(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_Tr_r(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_Tr_r(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_Tr_r(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_Tr_r(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_Tr_r(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_Tr_r(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_Tr_r(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_Tr_r(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_Tr_r(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_Tr_r(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_Tr_r(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_Tr_r(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_To_r(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_To_r(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_To_r(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_To_r(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_To_r(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_To_r(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_To_r(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_To_r(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_To_r(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_To_r(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_To_r(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_To_r(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_To_r(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_To_o(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_To_o(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_To_o(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_To_o(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_To_o(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_To_o(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_To_o(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_To_o(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_To_o(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_To_o(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_To_o(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_To_o(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_To_o(narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_T [Unit , Array[Unit ]](uarray));
+ zcheck(id_TSa_T [Boolean, Array[Boolean]](zarray));
+ bcheck(id_TSa_T [Byte , Array[Byte ]](barray));
+ scheck(id_TSa_T [Short , Array[Short ]](sarray));
+ ccheck(id_TSa_T [Char , Array[Char ]](carray));
+ icheck(id_TSa_T [Int , Array[Int ]](iarray));
+ lcheck(id_TSa_T [Long , Array[Long ]](larray));
+ fcheck(id_TSa_T [Float , Array[Float ]](farray));
+ dcheck(id_TSa_T [Double , Array[Double ]](darray));
+ rcheck(id_TSa_T [AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSa_T [Object , Array[Object ]](oarray));
+ mcheck(id_TSa_T [Map , Array[Map ]](marray));
+ ncheck(id_TSa_T [Strings, Array[Strings]](narray));
+
+ ucheck(id_TSv_T [Unit , Array[Unit ]](uarray));
+ zcheck(id_TSv_T [Boolean, Array[Boolean]](zarray));
+ bcheck(id_TSv_T [Byte , Array[Byte ]](barray));
+ scheck(id_TSv_T [Short , Array[Short ]](sarray));
+ ccheck(id_TSv_T [Char , Array[Char ]](carray));
+ icheck(id_TSv_T [Int , Array[Int ]](iarray));
+ lcheck(id_TSv_T [Long , Array[Long ]](larray));
+ fcheck(id_TSv_T [Float , Array[Float ]](farray));
+ dcheck(id_TSv_T [Double , Array[Double ]](darray));
+
+ rcheck(id_TSr_T [AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSr_T [Object , Array[Object ]](oarray));
+ mcheck(id_TSr_T [Map , Array[Map ]](marray));
+ ncheck(id_TSr_T [Strings, Array[Strings]](narray));
+
+ rcheck(id_TSo_T [AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSo_T [Object , Array[Object ]](oarray));
+ mcheck(id_TSo_T [Map , Array[Map ]](marray));
+ ncheck(id_TSo_T [Strings, Array[Strings]](narray));
+
+ mcheck(id_TSm_T [Map , Array[Map ]](marray));
+
+ ncheck(id_TSn_T [Strings, Array[Strings]](narray));
+
+ //######################################################################
+
+ ucheck(id_TSa_Ss[Unit , Array[Unit ]](uarray));
+ zcheck(id_TSa_Ss[Boolean, Array[Boolean]](zarray));
+ bcheck(id_TSa_Ss[Byte , Array[Byte ]](barray));
+ scheck(id_TSa_Ss[Short , Array[Short ]](sarray));
+ ccheck(id_TSa_Ss[Char , Array[Char ]](carray));
+ icheck(id_TSa_Ss[Int , Array[Int ]](iarray));
+ lcheck(id_TSa_Ss[Long , Array[Long ]](larray));
+ fcheck(id_TSa_Ss[Float , Array[Float ]](farray));
+ dcheck(id_TSa_Ss[Double , Array[Double ]](darray));
+ rcheck(id_TSa_Ss[AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSa_Ss[Object , Array[Object ]](oarray));
+ mcheck(id_TSa_Ss[Map , Array[Map ]](marray));
+ ncheck(id_TSa_Ss[Strings, Array[Strings]](narray));
+
+ ucheck(id_TSv_Ss[Unit , Array[Unit ]](uarray));
+ zcheck(id_TSv_Ss[Boolean, Array[Boolean]](zarray));
+ bcheck(id_TSv_Ss[Byte , Array[Byte ]](barray));
+ scheck(id_TSv_Ss[Short , Array[Short ]](sarray));
+ ccheck(id_TSv_Ss[Char , Array[Char ]](carray));
+ icheck(id_TSv_Ss[Int , Array[Int ]](iarray));
+ lcheck(id_TSv_Ss[Long , Array[Long ]](larray));
+ fcheck(id_TSv_Ss[Float , Array[Float ]](farray));
+ dcheck(id_TSv_Ss[Double , Array[Double ]](darray));
+
+ rcheck(id_TSr_Ss[AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSr_Ss[Object , Array[Object ]](oarray));
+ mcheck(id_TSr_Ss[Map , Array[Map ]](marray));
+ ncheck(id_TSr_Ss[Strings, Array[Strings]](narray));
+
+ rcheck(id_TSo_Ss[AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSo_Ss[Object , Array[Object ]](oarray));
+ mcheck(id_TSo_Ss[Map , Array[Map ]](marray));
+ ncheck(id_TSo_Ss[Strings, Array[Strings]](narray));
+
+ mcheck(id_TSm_Ss[Map , Array[Map ]](marray));
+
+ ncheck(id_TSn_Ss[Strings, Array[Strings]](narray));
+
+ //######################################################################
+
+ ucheck(id_TSa_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_Sas_Ss[Unit ](uarray));
+ zcheck(id_Sas_Ss[Boolean](zarray));
+ bcheck(id_Sas_Ss[Byte ](barray));
+ scheck(id_Sas_Ss[Short ](sarray));
+ ccheck(id_Sas_Ss[Char ](carray));
+ icheck(id_Sas_Ss[Int ](iarray));
+ lcheck(id_Sas_Ss[Long ](larray));
+ fcheck(id_Sas_Ss[Float ](farray));
+ dcheck(id_Sas_Ss[Double ](darray));
+ rcheck(id_Sas_Ss[AnyRef ](rarray));
+ ocheck(id_Sas_Ss[Object ](oarray));
+ mcheck(id_Sas_Ss[Map ](marray));
+ ncheck(id_Sas_Ss[Strings](narray));
+
+ ucheck(id_Svs_Ss[Unit ](uarray));
+ zcheck(id_Svs_Ss[Boolean](zarray));
+ bcheck(id_Svs_Ss[Byte ](barray));
+ scheck(id_Svs_Ss[Short ](sarray));
+ ccheck(id_Svs_Ss[Char ](carray));
+ icheck(id_Svs_Ss[Int ](iarray));
+ lcheck(id_Svs_Ss[Long ](larray));
+ fcheck(id_Svs_Ss[Float ](farray));
+ dcheck(id_Svs_Ss[Double ](darray));
+
+ rcheck(id_Srs_Ss[AnyRef ](rarray));
+ ocheck(id_Srs_Ss[Object ](oarray));
+ mcheck(id_Srs_Ss[Map ](marray));
+ ncheck(id_Srs_Ss[Strings](narray));
+
+ rcheck(id_Sos_Ss[AnyRef ](rarray));
+ ocheck(id_Sos_Ss[Object ](oarray));
+ mcheck(id_Sos_Ss[Map ](marray));
+ ncheck(id_Sos_Ss[Strings](narray));
+
+ mcheck(id_Sms_Ss[Map ](marray));
+
+ ncheck(id_Sns_Ss[Strings](narray));
+
+ //######################################################################
+
+ ucheck(id_TSa_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ check_Ta(uarray, 2, u0, ucheck)
+ check_Ta(zarray, 2, z0, zcheck)
+ check_Ta(barray, 3, b0, bcheck)
+ check_Ta(sarray, 3, s0, scheck)
+ check_Ta(carray, 3, c0, ccheck)
+ check_Ta(iarray, 3, i0, icheck)
+ check_Ta(larray, 3, l0, lcheck)
+ check_Ta(farray, 3, f0, fcheck)
+ check_Ta(darray, 3, d0, dcheck)
+ check_Ta(rarray, 6, r0, rcheck)
+ check_Ta(oarray, 6, o0, ocheck)
+ check_Ta(marray, 3, m0, mcheck)
+ check_Ta(narray, 3, n0, ncheck)
+
+ check_Tv(uarray, 2, u0, ucheck)
+ check_Tv(zarray, 2, z0, zcheck)
+ check_Tv(barray, 3, b0, bcheck)
+ check_Tv(sarray, 3, s0, scheck)
+ check_Tv(carray, 3, c0, ccheck)
+ check_Tv(iarray, 3, i0, icheck)
+ check_Tv(larray, 3, l0, lcheck)
+ check_Tv(farray, 3, f0, fcheck)
+ check_Tv(darray, 3, d0, dcheck)
+
+ check_Tr(rarray, 6, r0, rcheck)
+ check_Tr(oarray, 6, o0, ocheck)
+ check_Tr(marray, 3, m0, mcheck)
+ check_Tr(narray, 3, n0, ncheck)
+
+ check_To(rarray, 6, r0, rcheck)
+ check_To(oarray, 6, o0, ocheck)
+ check_To(marray, 3, m0, mcheck)
+ check_To(narray, 3, n0, ncheck)
+
+ check_Tm(marray, 3, m0, mcheck)
+
+ check_Tn(narray, 3, n0, ncheck)
+
+ //######################################################################
+
+ checkZip
+ checkConcat
+ checkT2368()
+
+ //######################################################################
+
+ println("checks: " + checks)
+
+ //######################################################################
+ }
+
+ //##########################################################################
+}
+
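
The battery above reduces to one invariant: passing an array through a generic identity, whatever the bound, hands back the very same runtime object, so every element check afterwards still holds. The core pattern in isolation, as a sketch:

    object IdentityDemo extends App {
      def id[T](x: T): T = x
      val xs = Array(1, 2, 3)
      assert(id(xs) eq xs)  // same Array[Int] instance, no boxing copy
    }
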
diff --git a/test/files/run/code.check b/test/files/run/code.check
deleted file mode 100644
index 9b0351bbf9..0000000000
--- a/test/files/run/code.check
+++ /dev/null
@@ -1,36 +0,0 @@
-testing: ((x: Int) => x.$plus(ys.length))
-type is: Int => Int
-result = ((x: Int) => x.+{(x: <?>)Int}(ys.length{Int}){Int}){Int => Int}
-evaluated = <function1>
-testing: (() => {
- val e: Element = new Element("someName");
- e
-})
-type is: () => Element
-result = (() => {
- val e: Element = new Element{Element}{(name: <?>)Element}("someName"{String("someName")}){Element};
- e{Element}
-}{Element}){() => Element}
-evaluated = Element(someName)
-testing: (() => truc.elem = 6)
-type is: () => Unit
-result = (() => truc.elem{Int} = 6{Int(6)}{Unit}){() => Unit}
-evaluated = null
-testing: (() => truc.elem = truc.elem.$plus(6))
-type is: () => Unit
-result = (() => truc.elem{Int} = truc.elem.+{(x: <?>)Int}(6{Int(6)}){Int}{Unit}){() => Unit}
-evaluated = null
-testing: (() => new baz.BazElement("someName"))
-type is: () => baz.BazElement
-result = (() => new baz.BazElement{baz.BazElement}{(name: <?>)baz.BazElement}("someName"{String("someName")}){baz.BazElement}){() => baz.BazElement}
-evaluated = BazElement(someName)
-testing: ((x: Int) => x.$plus(ys.length))
-type is: Int => Int
-result = ((x: Int) => x.+{(x: <?>)Int}(ys.length{Int}){Int}){Int => Int}
-evaluated = <function1>
-static: 2
-testing: (() => x.$plus(1))
-type is: () => Int
-result = (() => x.+{(x: <?>)Int}(1{Int(1)}){Int}){() => Int}
-evaluated = 2
-1+1 = 2
diff --git a/test/files/run/code.scala b/test/files/run/code.scala
deleted file mode 100644
index 162f796c63..0000000000
--- a/test/files/run/code.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-import scala.tools.partest.utils.CodeTest
-
-case class Element(name: String)
-
-object Test extends App {
- case class InnerElement(name: String)
- def foo[T](ys: List[T]) = {
- val fun: reflect.Code[Int => Int] = x => x + ys.length
- fun
- }
- CodeTest(foo(List(2)), args)
- CodeTest({() => val e = Element("someName"); e}, args)
-// CodeTest({() => val e = InnerElement("someName"); e}, args) // (does not work yet)
- def titi() = {
- var truc = 0
- CodeTest(() => {
- truc = 6
- }, args)
- }
- def tata(): Unit = {
- var truc = 0
- CodeTest(() => {
- truc = truc + 6
- }, args)
- }
- titi()
- tata()
- new baz.A(args)
-
- def show() {
- def foo[T](ys: List[T]) = {
- val fun: reflect.Code[Int => Int] = x => x + ys.length
- CodeTest(fun, args)
- }
- foo(List(1, 2, 3))
- }
-
- show()
-
- def evaltest(x: Int) = {
- CodeTest.static(() => x + 1, args)
- CodeTest(() => x + 1, args)
- }
-
- println("1+1 = "+evaltest(1))
-}
-
-
-package baz {
-
- case class BazElement(name: String) { }
-
- class A(args: Array[String]) {
- CodeTest(() => new baz.BazElement("someName"), args)
- }
-
-}
-
-
-
diff --git a/test/files/run/ctries/DumbHash.scala b/test/files/run/ctries/DumbHash.scala
new file mode 100644
index 0000000000..8ef325b67c
--- /dev/null
+++ b/test/files/run/ctries/DumbHash.scala
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+class DumbHash(val i: Int) {
+ override def equals(other: Any) = other match {
+ case that: DumbHash => that.i == this.i
+ case _ => false
+ }
+ override def hashCode = i % 5
+ override def toString = "DH(%s)".format(i)
+}
diff --git a/test/files/run/ctries/Wrap.scala b/test/files/run/ctries/Wrap.scala
new file mode 100644
index 0000000000..7b645c1612
--- /dev/null
+++ b/test/files/run/ctries/Wrap.scala
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+case class Wrap(i: Int) {
+ override def hashCode = i * 0x9e3775cd
+}
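
The two helpers steer the Ctrie suites toward opposite hash regimes: DumbHash folds every key into five buckets to force collision handling, while Wrap's multiplier (a Fibonacci-hashing-style constant) scatters bits across the hash. A quick use against the same API the tests rely on — a sketch, assuming the DumbHash class defined above is on the classpath:

    import collection.mutable.Ctrie

    object HelperDemo extends App {
      val ct = new Ctrie[DumbHash, Int]
      for (i <- 0 until 100) ct.put(new DumbHash(i), i)  // only 5 distinct hash codes
      assert((0 until 100).forall(i => ct.lookup(new DumbHash(i)) == i))
      println("collision chains resolved correctly")
    }
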
diff --git a/test/files/run/ctries/concmap.scala b/test/files/run/ctries/concmap.scala
new file mode 100644
index 0000000000..d73e33182a
--- /dev/null
+++ b/test/files/run/ctries/concmap.scala
@@ -0,0 +1,188 @@
+
+
+
+import collection.mutable.Ctrie
+
+
+object ConcurrentMapSpec extends Spec {
+
+ val initsz = 500
+ val secondsz = 750
+
+ def test() {
+ "support put" in {
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until initsz) assert(ct.put(new Wrap(i), i) == None)
+ for (i <- 0 until initsz) assert(ct.put(new Wrap(i), -i) == Some(i))
+ }
+
+ "support put if absent" in {
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i))
+ for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i))
+ for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), -i) == None)
+ for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), i) == Some(-i))
+ }
+
+ "support remove if mapped to a specific value" in {
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), -i - 1) == false)
+ for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == true)
+ for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == false)
+ }
+
+ "support replace if mapped to a specific value" in {
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i - 1, -i - 2) == false)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == true)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == false)
+ for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i, 0) == false)
+ }
+
+ "support replace if present" in {
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i) == Some(i))
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i) == Some(-i))
+ for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i) == None)
+ }
+
+ def assertEqual(a: Any, b: Any) = {
+ if (a != b) println(a, b)
+ assert(a == b)
+ }
+
+ "support replace if mapped to a specific value, using several threads" in {
+ val ct = new Ctrie[Wrap, Int]
+ val sz = 55000
+ for (i <- 0 until sz) ct.update(new Wrap(i), i)
+
+ class Updater(index: Int, offs: Int) extends Thread {
+ override def run() {
+ var repeats = 0
+ for (i <- 0 until sz) {
+ val j = (offs + i) % sz
+ var k = Int.MaxValue
+ do {
+ if (k != Int.MaxValue) repeats += 1
+ k = ct.lookup(new Wrap(j))
+ } while (!ct.replace(new Wrap(j), k, -k))
+ }
+ //println("Thread %d repeats: %d".format(index, repeats))
+ }
+ }
+
+ val threads = for (i <- 0 until 16) yield new Updater(i, sz / 32 * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+
+ for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), i)
+
+ val threads2 = for (i <- 0 until 15) yield new Updater(i, sz / 32 * i)
+ threads2.foreach(_.start())
+ threads2.foreach(_.join())
+
+ for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), -i)
+ }
+
+ "support put if absent, several threads" in {
+ val ct = new Ctrie[Wrap, Int]
+ val sz = 110000
+
+ class Updater(offs: Int) extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ val j = (offs + i) % sz
+ ct.putIfAbsent(new Wrap(j), j)
+ assert(ct.lookup(new Wrap(j)) == j)
+ }
+ }
+ }
+
+ val threads = for (i <- 0 until 16) yield new Updater(sz / 32 * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+
+ for (i <- 0 until sz) assert(ct(new Wrap(i)) == i)
+ }
+
+ "support remove if mapped to a specific value, several threads" in {
+ val ct = new Ctrie[Wrap, Int]
+ val sz = 55000
+ for (i <- 0 until sz) ct.update(new Wrap(i), i)
+
+ class Remover(offs: Int) extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ val j = (offs + i) % sz
+ ct.remove(new Wrap(j), j)
+ assert(ct.get(new Wrap(j)) == None)
+ }
+ }
+ }
+
+ val threads = for (i <- 0 until 16) yield new Remover(sz / 32 * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+
+ for (i <- 0 until sz) assert(ct.get(new Wrap(i)) == None)
+ }
+
+    "have all or none of the elements, depending on the parity" in {
+ val ct = new Ctrie[Wrap, Int]
+ val sz = 65000
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+
+ class Modifier(index: Int, offs: Int) extends Thread {
+ override def run() {
+ for (j <- 0 until sz) {
+ val i = (offs + j) % sz
+ var success = false
+ do {
+ if (ct.contains(new Wrap(i))) {
+ success = ct.remove(new Wrap(i)) != None
+ } else {
+ success = ct.putIfAbsent(new Wrap(i), i) == None
+ }
+ } while (!success)
+ }
+ }
+ }
+
+ def modify(n: Int) = {
+ val threads = for (i <- 0 until n) yield new Modifier(i, sz / n * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+ }
+
+ modify(16)
+ for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), Some(i))
+ modify(15)
+ for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), None)
+ }
+
+ "compute size correctly" in {
+ val ct = new Ctrie[Wrap, Int]
+ val sz = 36450
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+
+ assertEqual(ct.size, sz)
+ assertEqual(ct.size, sz)
+ }
+
+ "compute size correctly in parallel" in {
+ val ct = new Ctrie[Wrap, Int]
+ val sz = 36450
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val pct = ct.par
+
+ assertEqual(pct.size, sz)
+ assertEqual(pct.size, sz)
+ }
+
+ }
+
+}
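
The spec above pins down the ConcurrentMap contract of the new Ctrie: put returns the previous binding as an Option, putIfAbsent refuses to overwrite, and the two-argument remove and three-argument replace are compare-and-set style. A condensed sketch of that contract, assuming the collection.mutable.Ctrie added by this commit (renamed TrieMap in later releases):

    import collection.mutable.Ctrie

    object ContractSketch extends App {
      val ct = new Ctrie[Int, String]
      assert(ct.put(1, "a") == None)              // first insert: no prior value
      assert(ct.put(1, "b") == Some("a"))         // overwrite returns the old value
      assert(ct.putIfAbsent(1, "c") == Some("b")) // key present: binding kept
      assert(ct.replace(1, "b", "c"))             // CAS-style replace succeeds
      assert(!ct.remove(1, "b"))                  // stored value no longer matches
      assert(ct.remove(1, "c"))                   // matches, so the entry goes away
    }
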
diff --git a/test/files/run/ctries/iterator.scala b/test/files/run/ctries/iterator.scala
new file mode 100644
index 0000000000..85a6ab7623
--- /dev/null
+++ b/test/files/run/ctries/iterator.scala
@@ -0,0 +1,289 @@
+
+
+
+
+import collection._
+import collection.mutable.Ctrie
+
+
+
+object IteratorSpec extends Spec {
+
+ def test() {
+ "work for an empty trie" in {
+ val ct = new Ctrie
+ val it = ct.iterator
+
+ it.hasNext shouldEqual (false)
+ evaluating { it.next() }.shouldProduce [NoSuchElementException]
+ }
+
+ def nonEmptyIteratorCheck(sz: Int) {
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+
+ val it = ct.iterator
+ val tracker = mutable.Map[Wrap, Int]()
+ for (i <- 0 until sz) {
+ assert(it.hasNext == true)
+ tracker += it.next
+ }
+
+ it.hasNext shouldEqual (false)
+ evaluating { it.next() }.shouldProduce [NoSuchElementException]
+ tracker.size shouldEqual (sz)
+ tracker shouldEqual (ct)
+ }
+
+ "work for a 1 element trie" in {
+ nonEmptyIteratorCheck(1)
+ }
+
+ "work for a 2 element trie" in {
+ nonEmptyIteratorCheck(2)
+ }
+
+ "work for a 3 element trie" in {
+ nonEmptyIteratorCheck(3)
+ }
+
+ "work for a 5 element trie" in {
+ nonEmptyIteratorCheck(5)
+ }
+
+ "work for a 10 element trie" in {
+ nonEmptyIteratorCheck(10)
+ }
+
+ "work for a 20 element trie" in {
+ nonEmptyIteratorCheck(20)
+ }
+
+ "work for a 50 element trie" in {
+ nonEmptyIteratorCheck(50)
+ }
+
+ "work for a 100 element trie" in {
+ nonEmptyIteratorCheck(100)
+ }
+
+ "work for a 1k element trie" in {
+ nonEmptyIteratorCheck(1000)
+ }
+
+ "work for a 5k element trie" in {
+ nonEmptyIteratorCheck(5000)
+ }
+
+ "work for a 75k element trie" in {
+ nonEmptyIteratorCheck(75000)
+ }
+
+    "work for a 500k element trie" in {
+ nonEmptyIteratorCheck(500000)
+ }
+
+ def nonEmptyCollideCheck(sz: Int) {
+ val ct = new Ctrie[DumbHash, Int]
+ for (i <- 0 until sz) ct.put(new DumbHash(i), i)
+
+ val it = ct.iterator
+ val tracker = mutable.Map[DumbHash, Int]()
+ for (i <- 0 until sz) {
+ assert(it.hasNext == true)
+ tracker += it.next
+ }
+
+ it.hasNext shouldEqual (false)
+ evaluating { it.next() }.shouldProduce [NoSuchElementException]
+ tracker.size shouldEqual (sz)
+ tracker shouldEqual (ct)
+ }
+
+ "work for colliding hashcodes, 2 element trie" in {
+ nonEmptyCollideCheck(2)
+ }
+
+ "work for colliding hashcodes, 3 element trie" in {
+ nonEmptyCollideCheck(3)
+ }
+
+ "work for colliding hashcodes, 5 element trie" in {
+ nonEmptyCollideCheck(5)
+ }
+
+ "work for colliding hashcodes, 10 element trie" in {
+ nonEmptyCollideCheck(10)
+ }
+
+ "work for colliding hashcodes, 100 element trie" in {
+ nonEmptyCollideCheck(100)
+ }
+
+ "work for colliding hashcodes, 500 element trie" in {
+ nonEmptyCollideCheck(500)
+ }
+
+ "work for colliding hashcodes, 5k element trie" in {
+ nonEmptyCollideCheck(5000)
+ }
+
+ def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) {
+ if (a != b) {
+ println(a.size + " vs " + b.size)
+ // println(a)
+ // println(b)
+ // println(a.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
+ // println(b.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
+ }
+ assert(a == b)
+ }
+
+ "be consistent when taken with concurrent modifications" in {
+ val sz = 25000
+ val W = 15
+ val S = 5
+ val checks = 5
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+
+ class Modifier extends Thread {
+ override def run() {
+ for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match {
+ case Some(_) => ct.remove(new Wrap(i))
+ case None =>
+ }
+ }
+ }
+
+ def consistentIteration(ct: Ctrie[Wrap, Int], checks: Int) {
+ class Iter extends Thread {
+ override def run() {
+ val snap = ct.readOnlySnapshot()
+ val initial = mutable.Map[Wrap, Int]()
+ for (kv <- snap) initial += kv
+
+ for (i <- 0 until checks) {
+ assertEqual(snap.iterator.toMap, initial)
+ }
+ }
+ }
+
+ val iter = new Iter
+ iter.start()
+ iter.join()
+ }
+
+ val threads = for (_ <- 0 until W) yield new Modifier
+ threads.foreach(_.start())
+ for (_ <- 0 until S) consistentIteration(ct, checks)
+ threads.foreach(_.join())
+ }
+
+ "be consistent with a concurrent removal with a well defined order" in {
+ val sz = 150000
+ val sgroupsize = 10
+ val sgroupnum = 5
+ val removerslowdown = 50
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+
+ class Remover extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ assert(ct.remove(new Wrap(i)) == Some(i))
+ for (i <- 0 until removerslowdown) ct.get(new Wrap(i)) // slow down, mate
+ }
+ //println("done removing")
+ }
+ }
+
+ def consistentIteration(it: Iterator[(Wrap, Int)]) = {
+ class Iter extends Thread {
+ override def run() {
+ val elems = it.toBuffer
+ if (elems.nonEmpty) {
+ val minelem = elems.minBy((x: (Wrap, Int)) => x._1.i)._1.i
+ assert(elems.forall(_._1.i >= minelem))
+ }
+ }
+ }
+ new Iter
+ }
+
+ val remover = new Remover
+ remover.start()
+ for (_ <- 0 until sgroupnum) {
+ val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator)
+ iters.foreach(_.start())
+ iters.foreach(_.join())
+ }
+ //println("done with iterators")
+ remover.join()
+ }
+
+ "be consistent with a concurrent insertion with a well defined order" in {
+ val sz = 150000
+ val sgroupsize = 10
+ val sgroupnum = 10
+ val inserterslowdown = 50
+ val ct = new Ctrie[Wrap, Int]
+
+ class Inserter extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ assert(ct.put(new Wrap(i), i) == None)
+ for (i <- 0 until inserterslowdown) ct.get(new Wrap(i)) // slow down, mate
+ }
+ //println("done inserting")
+ }
+ }
+
+ def consistentIteration(it: Iterator[(Wrap, Int)]) = {
+ class Iter extends Thread {
+ override def run() {
+ val elems = it.toSeq
+ if (elems.nonEmpty) {
+ val maxelem = elems.maxBy((x: (Wrap, Int)) => x._1.i)._1.i
+ assert(elems.forall(_._1.i <= maxelem))
+ }
+ }
+ }
+ new Iter
+ }
+
+ val inserter = new Inserter
+ inserter.start()
+ for (_ <- 0 until sgroupnum) {
+ val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator)
+ iters.foreach(_.start())
+ iters.foreach(_.join())
+ }
+ //println("done with iterators")
+ inserter.join()
+ }
+
+ "work on a yet unevaluated snapshot" in {
+ val sz = 50000
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct.update(new Wrap(i), i)
+
+ val snap = ct.snapshot()
+ val it = snap.iterator
+
+ while (it.hasNext) it.next()
+ }
+
+ "be duplicated" in {
+ val sz = 50
+ val ct = collection.parallel.mutable.ParCtrie((0 until sz) zip (0 until sz): _*)
+ val it = ct.splitter
+ for (_ <- 0 until (sz / 2)) it.next()
+ val dupit = it.dup
+
+ it.toList shouldEqual dupit.toList
+ }
+
+ }
+
+}
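
The iterator tests rely on Ctrie iterators being backed by a constant-time snapshot, so a traversal observes one consistent state no matter how many writers are active. A hypothetical sketch of the property, using the readOnlySnapshot() call exercised above:

    import collection.mutable.Ctrie

    object SnapshotIterDemo extends App {
      val ct = new Ctrie[Int, Int]
      for (i <- 0 until 3) ct(i) = i
      val snap = ct.readOnlySnapshot()
      ct(99) = 99                      // mutate the live trie afterwards
      assert(snap.iterator.size == 3)  // the frozen view is unaffected
    }
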
diff --git a/test/files/run/ctries/lnode.scala b/test/files/run/ctries/lnode.scala
new file mode 100644
index 0000000000..88cbeed1f6
--- /dev/null
+++ b/test/files/run/ctries/lnode.scala
@@ -0,0 +1,61 @@
+
+
+
+import collection.mutable.Ctrie
+
+
+object LNodeSpec extends Spec {
+
+ val initsz = 1500
+ val secondsz = 1750
+
+ def test() {
+ "accept elements with the same hash codes" in {
+ val ct = new Ctrie[DumbHash, Int]
+ for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
+ }
+
+ "lookup elements with the same hash codes" in {
+ val ct = new Ctrie[DumbHash, Int]
+ for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
+ for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == Some(i))
+ for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None)
+ }
+
+ "remove elements with the same hash codes" in {
+ val ct = new Ctrie[DumbHash, Int]
+ for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
+ for (i <- 0 until initsz) {
+ val remelem = ct.remove(new DumbHash(i))
+ assert(remelem == Some(i), "removing " + i + " yields " + remelem)
+ }
+ for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None)
+ }
+
+ "put elements with the same hash codes if absent" in {
+ val ct = new Ctrie[DumbHash, Int]
+ for (i <- 0 until initsz) ct.put(new DumbHash(i), i)
+ for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+ for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i))
+ for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None)
+ for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i)
+ }
+
+ "replace elements with the same hash codes" in {
+ val ct = new Ctrie[DumbHash, Int]
+ for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
+ for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+ for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i))
+ for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i)
+ for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true)
+ }
+
+ "remove elements with the same hash codes if mapped to a specific value" in {
+ val ct = new Ctrie[DumbHash, Int]
+ for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
+ for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true)
+ }
+
+ }
+
+}
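
With DumbHash every key lands in one of five buckets, so each bucket degenerates into an LNode, a plain list of colliding entries, and all map operations must fall back to linear search plus equals(). A hypothetical sketch:

    import collection.mutable.Ctrie

    object LNodeDemo extends App {
      val ct = new Ctrie[DumbHash, Int]
      for (i <- 0 until 10) ct.update(new DumbHash(i), i)
      assert(ct.get(new DumbHash(7)) == Some(7))  // found via equals() in the list
      assert(ct.get(new DumbHash(12)) == None)    // collides with 2, but equals fails
    }
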
diff --git a/test/files/run/ctries/main.scala b/test/files/run/ctries/main.scala
new file mode 100644
index 0000000000..8db7fcef54
--- /dev/null
+++ b/test/files/run/ctries/main.scala
@@ -0,0 +1,45 @@
+
+
+
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ ConcurrentMapSpec.test()
+ IteratorSpec.test()
+ LNodeSpec.test()
+ SnapshotSpec.test()
+ }
+
+}
+
+
+trait Spec {
+
+ implicit def str2ops(s: String) = new {
+ def in[U](body: =>U) {
+ // just execute body
+ body
+ }
+ }
+
+ implicit def any2ops(a: Any) = new {
+ def shouldEqual(other: Any) = assert(a == other)
+ }
+
+ def evaluating[U](body: =>U) = new {
+ def shouldProduce[T <: Throwable: ClassManifest]() = {
+ var produced = false
+ try body
+ catch {
+ case e => if (e.getClass == implicitly[ClassManifest[T]].erasure) produced = true
+ } finally {
+ assert(produced, "Did not produce exception of type: " + implicitly[ClassManifest[T]])
+ }
+ }
+ }
+
+}
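
The Spec trait above is a deliberately tiny stand-in for a test framework: an implicit conversion gives strings an `in` method that just runs the body, another implicit gives any value shouldEqual, and evaluating/shouldProduce asserts that a block throws an exception of the expected erasure. Usage looks like this (hypothetical example object):

    object MiniSpec extends Spec {
      def test() {
        "add numbers" in {
          (1 + 1) shouldEqual 2
        }
        "throw on empty next" in {
          evaluating { Iterator.empty.next() }.shouldProduce [NoSuchElementException]
        }
      }
    }
    // MiniSpec.test() runs both clauses and fails loudly on a broken assertion
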
diff --git a/test/files/run/ctries/snapshot.scala b/test/files/run/ctries/snapshot.scala
new file mode 100644
index 0000000000..69073d3f06
--- /dev/null
+++ b/test/files/run/ctries/snapshot.scala
@@ -0,0 +1,267 @@
+
+
+
+
+import collection._
+import collection.mutable.Ctrie
+
+
+
+object SnapshotSpec extends Spec {
+
+ def test() {
+ "support snapshots" in {
+ val ctn = new Ctrie
+ ctn.snapshot()
+ ctn.readOnlySnapshot()
+
+ val ct = new Ctrie[Int, Int]
+ for (i <- 0 until 100) ct.put(i, i)
+ ct.snapshot()
+ ct.readOnlySnapshot()
+ }
+
+ "empty 2 quiescent snapshots in isolation" in {
+ val sz = 4000
+
+ class Worker(trie: Ctrie[Wrap, Int]) extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ assert(trie.remove(new Wrap(i)) == Some(i))
+ for (j <- 0 until sz)
+ if (j <= i) assert(trie.get(new Wrap(j)) == None)
+ else assert(trie.get(new Wrap(j)) == Some(j))
+ }
+ }
+ }
+
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+ val snapt = ct.snapshot()
+
+ val original = new Worker(ct)
+ val snapshot = new Worker(snapt)
+ original.start()
+ snapshot.start()
+ original.join()
+ snapshot.join()
+
+ for (i <- 0 until sz) {
+ assert(ct.get(new Wrap(i)) == None)
+ assert(snapt.get(new Wrap(i)) == None)
+ }
+ }
+
+ def consistentReadOnly(name: String, readonly: Map[Wrap, Int], sz: Int, N: Int) {
+ @volatile var e: Exception = null
+
+      // reads all possible entries once and stores them,
+      // then re-reads them N more times to check that the
+      // state stayed the same
+ class Reader(trie: Map[Wrap, Int]) extends Thread {
+ setName("Reader " + name)
+
+ override def run() =
+ try check()
+ catch {
+ case ex: Exception => e = ex
+ }
+
+ def check() {
+ val initial = mutable.Map[Wrap, Int]()
+ for (i <- 0 until sz) trie.get(new Wrap(i)) match {
+ case Some(i) => initial.put(new Wrap(i), i)
+ case None => // do nothing
+ }
+
+ for (k <- 0 until N) {
+ for (i <- 0 until sz) {
+ val tres = trie.get(new Wrap(i))
+ val ires = initial.get(new Wrap(i))
+ if (tres != ires) println(i, "initially: " + ires, "traversal %d: %s".format(k, tres))
+ assert(tres == ires)
+ }
+ }
+ }
+ }
+
+ val reader = new Reader(readonly)
+ reader.start()
+ reader.join()
+
+ if (e ne null) {
+ e.printStackTrace()
+ throw e
+ }
+ }
+
+ // traverses the trie `rep` times and modifies each entry
+ class Modifier(trie: Ctrie[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread {
+ setName("Modifier %d".format(index))
+
+ override def run() {
+ for (k <- 0 until rep) {
+ for (i <- 0 until sz) trie.putIfAbsent(new Wrap(i), i) match {
+ case Some(_) => trie.remove(new Wrap(i))
+ case None => // do nothing
+ }
+ }
+ }
+ }
+
+ // removes all the elements from the trie
+ class Remover(trie: Ctrie[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread {
+ setName("Remover %d".format(index))
+
+ override def run() {
+ for (i <- 0 until sz) trie.remove(new Wrap((i + sz / totremovers * index) % sz))
+ }
+ }
+
+ "have a consistent quiescent read-only snapshot" in {
+ val sz = 10000
+ val N = 100
+ val W = 10
+
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val readonly = ct.readOnlySnapshot()
+ val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
+
+ threads.foreach(_.start())
+ consistentReadOnly("qm", readonly, sz, N)
+ threads.foreach(_.join())
+ }
+
+ // now, we check non-quiescent snapshots, as these permit situations
+ // where a thread is caught in the middle of the update when a snapshot is taken
+
+ "have a consistent non-quiescent read-only snapshot, concurrent with removes only" in {
+ val sz = 1250
+ val W = 100
+ val S = 5000
+
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val threads = for (i <- 0 until W) yield new Remover(ct, i, W, sz)
+
+ threads.foreach(_.start())
+ for (i <- 0 until S) consistentReadOnly("non-qr", ct.readOnlySnapshot(), sz, 5)
+ threads.foreach(_.join())
+ }
+
+ "have a consistent non-quiescent read-only snapshot, concurrent with modifications" in {
+ val sz = 1000
+ val N = 7000
+ val W = 10
+ val S = 7000
+
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
+
+ threads.foreach(_.start())
+ for (i <- 0 until S) consistentReadOnly("non-qm", ct.readOnlySnapshot(), sz, 5)
+ threads.foreach(_.join())
+ }
+
+ def consistentNonReadOnly(name: String, trie: Ctrie[Wrap, Int], sz: Int, N: Int) {
+ @volatile var e: Exception = null
+
+      // reads the initial entries once, then repeatedly flips each
+      // stored entry via replace() and checks that the trie reflects
+      // exactly those modifications
+ class Worker extends Thread {
+ setName("Worker " + name)
+
+ override def run() =
+ try check()
+ catch {
+ case ex: Exception => e = ex
+ }
+
+ def check() {
+ val initial = mutable.Map[Wrap, Int]()
+ for (i <- 0 until sz) trie.get(new Wrap(i)) match {
+ case Some(i) => initial.put(new Wrap(i), i)
+ case None => // do nothing
+ }
+
+ for (k <- 0 until N) {
+ // modify
+ for ((key, value) <- initial) {
+ val oldv = if (k % 2 == 0) value else -value
+ val newv = -oldv
+ trie.replace(key, oldv, newv)
+ }
+
+ // check
+ for (i <- 0 until sz) if (initial.contains(new Wrap(i))) {
+ val expected = if (k % 2 == 0) -i else i
+ //println(trie.get(new Wrap(i)))
+ assert(trie.get(new Wrap(i)) == Some(expected))
+ } else {
+ assert(trie.get(new Wrap(i)) == None)
+ }
+ }
+ }
+ }
+
+ val worker = new Worker
+ worker.start()
+ worker.join()
+
+ if (e ne null) {
+ e.printStackTrace()
+ throw e
+ }
+ }
+
+ "have a consistent non-quiescent snapshot, concurrent with modifications" in {
+ val sz = 9000
+ val N = 1000
+ val W = 10
+ val S = 400
+
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
+
+ threads.foreach(_.start())
+ for (i <- 0 until S) {
+ consistentReadOnly("non-qm", ct.snapshot(), sz, 5)
+ consistentNonReadOnly("non-qsnap", ct.snapshot(), sz, 5)
+ }
+ threads.foreach(_.join())
+ }
+
+ "work when many concurrent snapshots are taken, concurrent with modifications" in {
+ val sz = 12000
+ val W = 10
+ val S = 10
+ val modifytimes = 1200
+ val snaptimes = 600
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+
+ class Snapshooter extends Thread {
+ setName("Snapshooter")
+ override def run() {
+ for (k <- 0 until snaptimes) {
+ val snap = ct.snapshot()
+ for (i <- 0 until sz) snap.remove(new Wrap(i))
+ for (i <- 0 until sz) assert(!snap.contains(new Wrap(i)))
+ }
+ }
+ }
+
+ val mods = for (i <- 0 until W) yield new Modifier(ct, i, modifytimes, sz)
+ val shooters = for (i <- 0 until S) yield new Snapshooter
+ val threads = mods ++ shooters
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+ }
+
+ }
+
+}
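
snapshot() is the writable counterpart of readOnlySnapshot(): it is constant-time because it only re-stamps the root with a fresh generation, and both tries then copy shared paths lazily on first access, which is why a snapshot can still be "yet unevaluated" and remain consistent under concurrent modification. A hypothetical sketch of the isolation guarantee these tests assert:

    import collection.mutable.Ctrie

    object SnapDemo extends App {
      val ct = new Ctrie[Int, Int]
      for (i <- 0 until 4) ct(i) = i
      val snap = ct.snapshot()
      snap.remove(0)                // mutate the snapshot...
      assert(ct.get(0) == Some(0))  // ...the original is untouched
      assert(snap.get(0) == None)
    }
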
diff --git a/test/files/run/elidable-opt.check b/test/files/run/elidable-opt.check
new file mode 100644
index 0000000000..88cf98e0d1
--- /dev/null
+++ b/test/files/run/elidable-opt.check
@@ -0,0 +1,14 @@
+Good for me, I was not elided. Test.f3
+Good for me, I was not elided. O.f3
+Good for me, I was not elided. C.f1
+Good for me, I was not elided. C.f2
+()
+false
+0
+0
+0
+0
+0
+0.0
+0.0
+null
diff --git a/test/files/run/elidable-opt.flags b/test/files/run/elidable-opt.flags
new file mode 100644
index 0000000000..62897ff218
--- /dev/null
+++ b/test/files/run/elidable-opt.flags
@@ -0,0 +1 @@
+-optimise -Xelide-below 900
diff --git a/test/files/run/elidable-opt.scala b/test/files/run/elidable-opt.scala
new file mode 100644
index 0000000000..a2f29d2caf
--- /dev/null
+++ b/test/files/run/elidable-opt.scala
@@ -0,0 +1,85 @@
+import annotation._
+import elidable._
+
+trait T {
+ @elidable(FINEST) def f1()
+ @elidable(SEVERE) def f2()
+ @elidable(FINEST) def f3() = assert(false, "Should have been elided.")
+ def f4()
+}
+
+class C extends T {
+ def f1() = println("Good for me, I was not elided. C.f1")
+ def f2() = println("Good for me, I was not elided. C.f2")
+ @elidable(FINEST) def f4() = assert(false, "Should have been elided.")
+}
+
+object O {
+ @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
+ @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided. O.f3")
+ @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+}
+
+object Test {
+ @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
+ @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided. Test.f3")
+ @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+
+ @elidable(FINEST) def f5() = {}
+ @elidable(FINEST) def f6() = true
+ @elidable(FINEST) def f7() = 1:Byte
+ @elidable(FINEST) def f8() = 1:Short
+ @elidable(FINEST) def f9() = 1:Char
+ @elidable(FINEST) def fa() = 1
+ @elidable(FINEST) def fb() = 1l
+ @elidable(FINEST) def fc() = 1.0f
+ @elidable(FINEST) def fd() = 1.0
+ @elidable(FINEST) def fe() = "s"
+
+ def main(args: Array[String]): Unit = {
+ f1()
+ f2()
+ f3()
+ f4
+ O.f1()
+ O.f2()
+ O.f3()
+ O.f4
+
+ val c = new C
+ c.f1()
+ c.f2()
+ c.f3()
+ c.f4()
+
+ // make sure a return value is still available when eliding a call
+ println(f5())
+ println(f6())
+ println(f7())
+ println(f8())
+ println(f9().toInt)
+ println(fa())
+ println(fb())
+ println(fc())
+ println(fd())
+ println(fe())
+
+ // this one won't show up in the output because a call to f1 is elidable when accessed through T
+ (c:T).f1()
+
+ // Test whether the method definitions are still available.
+ List("Test", "Test$", "O", "O$", "C", "T") foreach { className =>
+ List("f1", "f2", "f3", "f4") foreach { methodName =>
+ Class.forName(className).getMethod(methodName)
+ }
+ }
+ List("Test", "Test$") foreach { className =>
+ List("f5", "f6", "f7", "f8", "f9", "fa", "fb", "fc", "fd", "fe") foreach { methodName =>
+ Class.forName(className).getMethod(methodName)
+ }
+ }
+ Class.forName("T$class").getMethod("f3", classOf[T])
+ }
+}
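
The trailing (), false, zeros, 0.0 and null lines in the .check file come from the elision rule: when a call to an @elidable method is compiled away (here via the -Xelide-below 900 flag in the .flags file), the call expression is replaced by the zero value of its result type. A minimal hypothetical sketch:

    import annotation._
    import elidable._

    object ElideDemo extends App {
      @elidable(FINEST) def audit(): Boolean = true
      println(audit())  // prints "false" once the call is elided
    }
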
diff --git a/test/files/run/elidable.check b/test/files/run/elidable.check
index 4ce04f0040..88cf98e0d1 100644
--- a/test/files/run/elidable.check
+++ b/test/files/run/elidable.check
@@ -1 +1,14 @@
-Good for me, I was not elided.
+Good for me, I was not elided. Test.f3
+Good for me, I was not elided. O.f3
+Good for me, I was not elided. C.f1
+Good for me, I was not elided. C.f2
+()
+false
+0
+0
+0
+0
+0
+0.0
+0.0
+null
diff --git a/test/files/run/elidable.scala b/test/files/run/elidable.scala
index 264efbad59..a2f29d2caf 100644
--- a/test/files/run/elidable.scala
+++ b/test/files/run/elidable.scala
@@ -1,16 +1,85 @@
import annotation._
import elidable._
+trait T {
+ @elidable(FINEST) def f1()
+ @elidable(SEVERE) def f2()
+ @elidable(FINEST) def f3() = assert(false, "Should have been elided.")
+ def f4()
+}
+
+class C extends T {
+ def f1() = println("Good for me, I was not elided. C.f1")
+ def f2() = println("Good for me, I was not elided. C.f2")
+ @elidable(FINEST) def f4() = assert(false, "Should have been elided.")
+}
+
+object O {
+ @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
+ @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided. O.f3")
+ @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+}
+
object Test {
@elidable(FINEST) def f1() = assert(false, "Should have been elided.")
@elidable(INFO) def f2() = assert(false, "Should have been elided.")
- @elidable(SEVERE) def f3() = println("Good for me, I was not elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided. Test.f3")
@elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
-
+
+ @elidable(FINEST) def f5() = {}
+ @elidable(FINEST) def f6() = true
+ @elidable(FINEST) def f7() = 1:Byte
+ @elidable(FINEST) def f8() = 1:Short
+ @elidable(FINEST) def f9() = 1:Char
+ @elidable(FINEST) def fa() = 1
+ @elidable(FINEST) def fb() = 1l
+ @elidable(FINEST) def fc() = 1.0f
+ @elidable(FINEST) def fd() = 1.0
+ @elidable(FINEST) def fe() = "s"
+
def main(args: Array[String]): Unit = {
f1()
f2()
f3()
f4
+ O.f1()
+ O.f2()
+ O.f3()
+ O.f4
+
+ val c = new C
+ c.f1()
+ c.f2()
+ c.f3()
+ c.f4()
+
+ // make sure a return value is still available when eliding a call
+ println(f5())
+ println(f6())
+ println(f7())
+ println(f8())
+ println(f9().toInt)
+ println(fa())
+ println(fb())
+ println(fc())
+ println(fd())
+ println(fe())
+
+ // this one won't show up in the output because a call to f1 is elidable when accessed through T
+ (c:T).f1()
+
+ // Test whether the method definitions are still available.
+ List("Test", "Test$", "O", "O$", "C", "T") foreach { className =>
+ List("f1", "f2", "f3", "f4") foreach { methodName =>
+ Class.forName(className).getMethod(methodName)
+ }
+ }
+ List("Test", "Test$") foreach { className =>
+ List("f5", "f6", "f7", "f8", "f9", "fa", "fb", "fc", "fd", "fe") foreach { methodName =>
+ Class.forName(className).getMethod(methodName)
+ }
+ }
+ Class.forName("T$class").getMethod("f3", classOf[T])
}
}
diff --git a/test/files/run/existentials-in-compiler.check b/test/files/run/existentials-in-compiler.check
new file mode 100644
index 0000000000..c8040a4cb1
--- /dev/null
+++ b/test/files/run/existentials-in-compiler.check
@@ -0,0 +1,156 @@
+abstract trait Bippy[A <: AnyRef,B] extends Object
+ extest.Bippy[_ <: AnyRef, _]
+
+abstract trait BippyBud[A <: AnyRef,B,C <: List[A]] extends Object
+ extest.BippyBud[A,B,C] forSome { A <: AnyRef; B; C <: List[A] }
+
+abstract trait BippyLike[A <: AnyRef,B <: List[A],This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B]] extends Object
+ extest.BippyLike[A,B,This] forSome { A <: AnyRef; B <: List[A]; This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B] }
+
+abstract trait Contra[-A >: AnyRef,-B] extends Object
+ extest.Contra[_ >: AnyRef, _]
+
+abstract trait ContraLike[-A >: AnyRef,-B >: List[A]] extends Object
+ extest.ContraLike[A,B] forSome { -A >: AnyRef; -B >: List[A] }
+
+abstract trait Cov01[+A <: AnyRef,+B] extends Object
+ extest.Cov01[_ <: AnyRef, _]
+
+abstract trait Cov02[+A <: AnyRef,B] extends Object
+ extest.Cov02[_ <: AnyRef, _]
+
+abstract trait Cov03[+A <: AnyRef,-B] extends Object
+ extest.Cov03[_ <: AnyRef, _]
+
+abstract trait Cov04[A <: AnyRef,+B] extends Object
+ extest.Cov04[_ <: AnyRef, _]
+
+abstract trait Cov05[A <: AnyRef,B] extends Object
+ extest.Cov05[_ <: AnyRef, _]
+
+abstract trait Cov06[A <: AnyRef,-B] extends Object
+ extest.Cov06[_ <: AnyRef, _]
+
+abstract trait Cov07[-A <: AnyRef,+B] extends Object
+ extest.Cov07[_ <: AnyRef, _]
+
+abstract trait Cov08[-A <: AnyRef,B] extends Object
+ extest.Cov08[_ <: AnyRef, _]
+
+abstract trait Cov09[-A <: AnyRef,-B] extends Object
+ extest.Cov09[_ <: AnyRef, _]
+
+abstract trait Cov11[+A <: AnyRef,+B <: List[_]] extends Object
+ extest.Cov11[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov12[+A <: AnyRef,B <: List[_]] extends Object
+ extest.Cov12[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov13[+A <: AnyRef,-B <: List[_]] extends Object
+ extest.Cov13[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov14[A <: AnyRef,+B <: List[_]] extends Object
+ extest.Cov14[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov15[A <: AnyRef,B <: List[_]] extends Object
+ extest.Cov15[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov16[A <: AnyRef,-B <: List[_]] extends Object
+ extest.Cov16[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov17[-A <: AnyRef,+B <: List[_]] extends Object
+ extest.Cov17[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov18[-A <: AnyRef,B <: List[_]] extends Object
+ extest.Cov18[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov19[-A <: AnyRef,-B <: List[_]] extends Object
+ extest.Cov19[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov21[+A,+B] extends Object
+ extest.Cov21[_, _]
+
+abstract trait Cov22[+A,B] extends Object
+ extest.Cov22[_, _]
+
+abstract trait Cov23[+A,-B] extends Object
+ extest.Cov23[_, _]
+
+abstract trait Cov24[A,+B] extends Object
+ extest.Cov24[_, _]
+
+abstract trait Cov25[A,B] extends Object
+ extest.Cov25[_, _]
+
+abstract trait Cov26[A,-B] extends Object
+ extest.Cov26[_, _]
+
+abstract trait Cov27[-A,+B] extends Object
+ extest.Cov27[_, _]
+
+abstract trait Cov28[-A,B] extends Object
+ extest.Cov28[_, _]
+
+abstract trait Cov29[-A,-B] extends Object
+ extest.Cov29[_, _]
+
+abstract trait Cov31[+A,+B,C <: (A, B)] extends Object
+ extest.Cov31[A,B,C] forSome { +A; +B; C <: (A, B) }
+
+abstract trait Cov32[+A,B,C <: (A, B)] extends Object
+ extest.Cov32[A,B,C] forSome { +A; B; C <: (A, B) }
+
+abstract trait Cov33[+A,-B,C <: (A, _$10) forSome { type _$10 }] extends Object
+ extest.Cov33[A,B,C] forSome { +A; -B; C <: (A, _$10) forSome { type _$10 } }
+
+abstract trait Cov34[A,+B,C <: (A, B)] extends Object
+ extest.Cov34[A,B,C] forSome { A; +B; C <: (A, B) }
+
+abstract trait Cov35[A,B,C <: (A, B)] extends Object
+ extest.Cov35[A,B,C] forSome { A; B; C <: (A, B) }
+
+abstract trait Cov36[A,-B,C <: (A, _$11) forSome { type _$11 }] extends Object
+ extest.Cov36[A,B,C] forSome { A; -B; C <: (A, _$11) forSome { type _$11 } }
+
+abstract trait Cov37[-A,+B,C <: (_$12, B) forSome { type _$12 }] extends Object
+ extest.Cov37[A,B,C] forSome { -A; +B; C <: (_$12, B) forSome { type _$12 } }
+
+abstract trait Cov38[-A,B,C <: (_$13, B) forSome { type _$13 }] extends Object
+ extest.Cov38[A,B,C] forSome { -A; B; C <: (_$13, B) forSome { type _$13 } }
+
+abstract trait Cov39[-A,-B,C <: Tuple2[_, _]] extends Object
+ extest.Cov39[_, _, _ <: Tuple2[_, _]]
+
+abstract trait Cov41[+A >: Null,+B] extends Object
+ extest.Cov41[_ >: Null, _]
+
+abstract trait Cov42[+A >: Null,B] extends Object
+ extest.Cov42[_ >: Null, _]
+
+abstract trait Cov43[+A >: Null,-B] extends Object
+ extest.Cov43[_ >: Null, _]
+
+abstract trait Cov44[A >: Null,+B] extends Object
+ extest.Cov44[_ >: Null, _]
+
+abstract trait Cov45[A >: Null,B] extends Object
+ extest.Cov45[_ >: Null, _]
+
+abstract trait Cov46[A >: Null,-B] extends Object
+ extest.Cov46[_ >: Null, _]
+
+abstract trait Cov47[-A >: Null,+B] extends Object
+ extest.Cov47[_ >: Null, _]
+
+abstract trait Cov48[-A >: Null,B] extends Object
+ extest.Cov48[_ >: Null, _]
+
+abstract trait Cov49[-A >: Null,-B] extends Object
+ extest.Cov49[_ >: Null, _]
+
+abstract trait Covariant[+A <: AnyRef,+B] extends Object
+ extest.Covariant[_ <: AnyRef, _]
+
+abstract trait CovariantLike[+A <: AnyRef,+B <: List[A],+This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends Object
+ extest.CovariantLike[A,B,This] forSome { +A <: AnyRef; +B <: List[A]; +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B] }
+
diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala
new file mode 100644
index 0000000000..e4f6920145
--- /dev/null
+++ b/test/files/run/existentials-in-compiler.scala
@@ -0,0 +1,83 @@
+import scala.tools.nsc._
+import scala.tools.partest.CompilerTest
+import scala.collection.{ mutable, immutable, generic }
+
+object Test extends CompilerTest {
+ import global._
+ import definitions._
+
+ def code = """
+package extest {
+ trait Bippy[A <: AnyRef, B] { } // wildcards
+ trait BippyLike[A <: AnyRef, B <: List[A], This <: BippyLike[A, B, This] with Bippy[A, B]] // no wildcards
+ trait BippyBud[A <: AnyRef, B, C <: List[A]]
+
+ trait Cov01[+A <: AnyRef, +B] { }
+ trait Cov02[+A <: AnyRef, B] { }
+ trait Cov03[+A <: AnyRef, -B] { }
+ trait Cov04[ A <: AnyRef, +B] { }
+ trait Cov05[ A <: AnyRef, B] { }
+ trait Cov06[ A <: AnyRef, -B] { }
+ trait Cov07[-A <: AnyRef, +B] { }
+ trait Cov08[-A <: AnyRef, B] { }
+ trait Cov09[-A <: AnyRef, -B] { }
+
+ trait Cov11[+A <: AnyRef, +B <: List[_]] { }
+ trait Cov12[+A <: AnyRef, B <: List[_]] { }
+ trait Cov13[+A <: AnyRef, -B <: List[_]] { }
+ trait Cov14[ A <: AnyRef, +B <: List[_]] { }
+ trait Cov15[ A <: AnyRef, B <: List[_]] { }
+ trait Cov16[ A <: AnyRef, -B <: List[_]] { }
+ trait Cov17[-A <: AnyRef, +B <: List[_]] { }
+ trait Cov18[-A <: AnyRef, B <: List[_]] { }
+ trait Cov19[-A <: AnyRef, -B <: List[_]] { }
+
+ trait Cov21[+A, +B] { }
+ trait Cov22[+A, B] { }
+ trait Cov23[+A, -B] { }
+ trait Cov24[ A, +B] { }
+ trait Cov25[ A, B] { }
+ trait Cov26[ A, -B] { }
+ trait Cov27[-A, +B] { }
+ trait Cov28[-A, B] { }
+ trait Cov29[-A, -B] { }
+
+ trait Cov31[+A, +B, C <: ((A, B))] { }
+ trait Cov32[+A, B, C <: ((A, B))] { }
+ trait Cov33[+A, -B, C <: ((A, _))] { }
+ trait Cov34[ A, +B, C <: ((A, B))] { }
+ trait Cov35[ A, B, C <: ((A, B))] { }
+ trait Cov36[ A, -B, C <: ((A, _))] { }
+ trait Cov37[-A, +B, C <: ((_, B))] { }
+ trait Cov38[-A, B, C <: ((_, B))] { }
+ trait Cov39[-A, -B, C <: ((_, _))] { }
+
+ trait Cov41[+A >: Null, +B] { }
+ trait Cov42[+A >: Null, B] { }
+ trait Cov43[+A >: Null, -B] { }
+ trait Cov44[ A >: Null, +B] { }
+ trait Cov45[ A >: Null, B] { }
+ trait Cov46[ A >: Null, -B] { }
+ trait Cov47[-A >: Null, +B] { }
+ trait Cov48[-A >: Null, B] { }
+ trait Cov49[-A >: Null, -B] { }
+
+ trait Covariant[+A <: AnyRef, +B] { }
+ trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: CovariantLike[A, B, This] with Covariant[A, B]]
+
+ trait Contra[-A >: AnyRef, -B] { }
+ trait ContraLike[-A >: AnyRef, -B >: List[A]]
+}
+ """
+
+ def check(source: String, unit: global.CompilationUnit) = {
+ getRequiredModule("extest").moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz =>
+ afterTyper {
+ clazz.info
+ println(clazz.defString)
+ println(" " + classExistentialType(clazz) + "\n")
+ }
+ }
+ true
+ }
+}
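
check() prints each trait's defString followed by its classExistentialType, i.e. the trait with all type parameters existentially abstracted. The .check output shows the two shapes this takes: parameters that nothing else refers to collapse to plain wildcards, while parameters mentioned in another parameter's bound must stay named inside a forSome clause. In plain Scala the distinction looks like this (hypothetical trait names):

    object ExistentialSketch {
      trait Indep[A, B]
      trait Linked[A, B <: List[A]]
      type T1 = Indep[_, _]  // independent parameters become wildcards
      type T2 = Linked[A, B] forSome { type A; type B <: List[A] }  // B refers to A
    }
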
diff --git a/test/files/run/existentials3.check b/test/files/run/existentials3.check
new file mode 100644
index 0000000000..41dc1f767c
--- /dev/null
+++ b/test/files/run/existentials3.check
@@ -0,0 +1,22 @@
+_ <: scala.runtime.AbstractFunction0[_ <: Object with Test$ToS with scala.ScalaObject with scala.Product with scala.Serializable] with scala.ScalaObject with scala.Serializable with java.lang.Object
+_ <: Object with Test$ToS with scala.ScalaObject with scala.Product with scala.Serializable
+Object with Test$ToS with scala.ScalaObject
+Object with Test$ToS with scala.ScalaObject
+Object with Test$ToS with scala.ScalaObject
+scala.Function0[Object with Test$ToS with scala.ScalaObject]
+scala.Function0[Object with Test$ToS with scala.ScalaObject]
+_ <: Object with _ <: Object with Object with Test$ToS with scala.ScalaObject
+_ <: Object with _ <: Object with _ <: Object with Test$ToS with scala.ScalaObject
+scala.collection.immutable.List[Object with scala.collection.Seq[Int] with scala.ScalaObject]
+scala.collection.immutable.List[Object with scala.collection.Seq[_ <: Int] with scala.ScalaObject]
+_ <: scala.runtime.AbstractFunction0[_ <: Object with Test$ToS with scala.ScalaObject with scala.Product with scala.Serializable] with scala.ScalaObject with scala.Serializable with java.lang.Object
+_ <: Object with Test$ToS with scala.ScalaObject with scala.Product with scala.Serializable
+Object with Test$ToS with scala.ScalaObject
+Object with Test$ToS with scala.ScalaObject
+Object with Test$ToS with scala.ScalaObject
+scala.Function0[Object with Test$ToS with scala.ScalaObject]
+scala.Function0[Object with Test$ToS with scala.ScalaObject]
+_ <: Object with _ <: Object with Object with Test$ToS with scala.ScalaObject
+_ <: Object with _ <: Object with _ <: Object with Test$ToS with scala.ScalaObject
+scala.collection.immutable.List[Object with scala.collection.Seq[Int] with scala.ScalaObject]
+scala.collection.immutable.List[Object with scala.collection.Seq[_ <: Int] with scala.ScalaObject]
diff --git a/test/files/run/existentials3.scala b/test/files/run/existentials3.scala
new file mode 100644
index 0000000000..bb80d366cc
--- /dev/null
+++ b/test/files/run/existentials3.scala
@@ -0,0 +1,73 @@
+object Test {
+ trait ToS { final override def toString = getClass.getName }
+
+ def f1 = { case class Bar() extends ToS; Bar }
+ def f2 = { case class Bar() extends ToS; Bar() }
+ def f3 = { class Bar() extends ToS; object Bar extends ToS; Bar }
+ def f4 = { class Bar() extends ToS; new Bar() }
+ def f5 = { object Bar extends ToS; Bar }
+ def f6 = { () => { object Bar extends ToS ; Bar } }
+ def f7 = { val f = { () => { object Bar extends ToS ; Bar } } ; f }
+
+ def f8 = { trait A ; trait B extends A ; class C extends B with ToS; new C { } }
+ def f9 = { trait A ; trait B ; class C extends B with A with ToS; new C { } }
+
+ def f10 = { class A { type T1 } ; List[A#T1]() }
+ def f11 = { abstract class A extends Seq[Int] ; List[A]() }
+ def f12 = { abstract class A extends Seq[U forSome { type U <: Int }] ; List[A]() }
+
+ val g1 = { case class Bar() extends ToS; Bar }
+ val g2 = { case class Bar() extends ToS; Bar() }
+ val g3 = { class Bar() extends ToS; object Bar extends ToS; Bar }
+ val g4 = { class Bar() extends ToS; new Bar() }
+ val g5 = { object Bar extends ToS; Bar }
+ val g6 = { () => { object Bar extends ToS ; Bar } }
+ val g7 = { val f = { () => { object Bar extends ToS ; Bar } } ; f }
+
+ val g8 = { trait A ; trait B extends A ; class C extends B with ToS; new C { } }
+ val g9 = { trait A ; trait B ; class C extends B with A with ToS; new C { } }
+
+ val g10 = { class A { type T1 } ; List[A#T1]() }
+ val g11 = { abstract class A extends Seq[Int] ; List[A]() }
+ val g12 = { abstract class A extends Seq[U forSome { type U <: Int }] ; List[A]() }
+
+ def m[T: Manifest](x: T) = println(manifest[T])
+
+ // manifests don't work for f10/g10
+ def main(args: Array[String]): Unit = {
+ m(f1)
+ m(f2)
+ m(f3)
+ m(f4)
+ m(f5)
+ m(f6)
+ m(f7)
+ m(f8)
+ m(f9)
+ // m(f10)
+ m(f11)
+ m(f12)
+ m(g1)
+ m(g2)
+ m(g3)
+ m(g4)
+ m(g5)
+ m(g6)
+ m(g7)
+ m(g8)
+ m(g9)
+ // m(g10)
+ m(g11)
+ m(g12)
+ }
+}
+
+object Misc {
+ trait Bippy { def bippy = "I'm Bippy!" }
+ object o1 {
+ def f1 = { trait A extends Seq[U forSome { type U <: Bippy }] ; abstract class B extends A ; trait C extends B ; (null: C) }
+ def f2 = f1.head.bippy
+ }
+ def g1 = o1.f1 _
+ def g2 = o1.f2 _
+}
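
Each f/g definition returns a value whose most precise type involves a class or object local to the block, so the block's type must existentially hide it; the manifests printed in the .check file are exactly those inferred existentials. The core effect in one hypothetical example:

    object EscapeDemo extends App {
      def leak = { class Local; new Local }           // Local escapes as an existential
      def m[T: Manifest](x: T) = println(manifest[T])
      m(leak)  // prints an existential over the anonymous local class
    }
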
diff --git a/test/files/run/interpolation.check b/test/files/run/interpolation.check
index 09579a800a..997abb4497 100644
--- a/test/files/run/interpolation.check
+++ b/test/files/run/interpolation.check
@@ -24,3 +24,9 @@ Best price: 13.345
Best price: 13.35
13.345% discount included
13.35% discount included
+
+0
+00
+
+0
+00
diff --git a/test/files/run/interpolation.scala b/test/files/run/interpolation.scala
index a0a185eaab..f443bd5feb 100644
--- a/test/files/run/interpolation.scala
+++ b/test/files/run/interpolation.scala
@@ -23,4 +23,10 @@ object Test extends App {
test2(10.0f)
test2(13.345f)
+ println(s"")
+ println(s"${0}")
+ println(s"${0}${0}")
+ println(f"")
+ println(f"${0}")
+ println(f"${0}${0}")
}
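
The added lines pin down degenerate interpolation cases for both the s and f interpolators: an empty interpolated string prints as an empty line, and adjacent splices concatenate with nothing in between, which accounts for the blank, 0 and 00 lines added to the .check file. A hypothetical sketch (these interpolators were still behind -Xexperimental at this point):

    object InterpEdge extends App {
      println(s"")          // empty interpolation: an empty line
      println(s"${0}${0}")  // adjacent splices, no separator: "00"
      println(f"${0}%02d")  // f additionally consumes format directives: "00"
    }
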
diff --git a/test/files/run/interpolationMultiline1.check b/test/files/run/interpolationMultiline1.check
new file mode 100644
index 0000000000..09579a800a
--- /dev/null
+++ b/test/files/run/interpolationMultiline1.check
@@ -0,0 +1,26 @@
+Bob is 1 years old
+Bob is 1 years old
+Bob will be 2 years old
+Bob will be 2 years old
+1+1 = 2
+1+1 = 2
+Bob is 12 years old
+Bob is 12 years old
+Bob will be 13 years old
+Bob will be 13 years old
+12+1 = 13
+12+1 = 13
+Bob is 123 years old
+Bob is 123 years old
+Bob will be 124 years old
+Bob will be 124 years old
+123+1 = 124
+123+1 = 124
+Best price: 10.0
+Best price: 10.00
+10.0% discount included
+10.00% discount included
+Best price: 13.345
+Best price: 13.35
+13.345% discount included
+13.35% discount included
diff --git a/test/files/run/interpolationMultiline1.flags b/test/files/run/interpolationMultiline1.flags
new file mode 100644
index 0000000000..48fd867160
--- /dev/null
+++ b/test/files/run/interpolationMultiline1.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/run/interpolationMultiline1.scala b/test/files/run/interpolationMultiline1.scala
new file mode 100644
index 0000000000..437aed44b0
--- /dev/null
+++ b/test/files/run/interpolationMultiline1.scala
@@ -0,0 +1,26 @@
+object Test extends App {
+
+ def test1(n: Int) = {
+ println(s"""Bob is $n years old""")
+ println(f"""Bob is $n%2d years old""")
+ println(s"""Bob will be ${n+1} years old""")
+ println(f"""Bob will be ${n+1}%2d years old""")
+ println(s"""$n+1 = ${n+1}""")
+ println(f"""$n%d+1 = ${n+1}%d""")
+ }
+
+ def test2(f: Float) = {
+ println(s"""Best price: $f""")
+ println(f"""Best price: $f%.2f""")
+ println(s"""$f% discount included""")
+ println(f"""$f%3.2f% discount included""")
+ }
+
+ test1(1)
+ test1(12)
+ test1(123)
+
+ test2(10.0f)
+ test2(13.345f)
+
+}
diff --git a/test/files/run/interpolationMultiline2.check b/test/files/run/interpolationMultiline2.check
new file mode 100644
index 0000000000..2218c93a99
--- /dev/null
+++ b/test/files/run/interpolationMultiline2.check
@@ -0,0 +1,26 @@
+Bob is 1 years old!
+Bob is 1 years old!
+Bob is 1 years old!
+Bob is 1 years old!
+Bob is 1 years old!
+Bob is 1%2d years old!
+Bob is 1 years old!
+Bob is 1%2d years old!
+===============
+Bob is 12 years old!
+Bob is 12 years old!
+Bob is 12 years old!
+Bob is 12 years old!
+Bob is 12 years old!
+Bob is 12%2d years old!
+Bob is 12 years old!
+Bob is 12%2d years old!
+===============
+Bob is 123 years old!
+Bob is 123 years old!
+Bob is 123 years old!
+Bob is 123 years old!
+Bob is 123 years old!
+Bob is 123%2d years old!
+Bob is 123 years old!
+Bob is 123%2d years old!
diff --git a/test/files/run/interpolationMultiline2.flags b/test/files/run/interpolationMultiline2.flags
new file mode 100644
index 0000000000..e1b37447c9
--- /dev/null
+++ b/test/files/run/interpolationMultiline2.flags
@@ -0,0 +1 @@
+-Xexperimental \ No newline at end of file
diff --git a/test/files/run/interpolationMultiline2.scala b/test/files/run/interpolationMultiline2.scala
new file mode 100644
index 0000000000..f6a682c3ce
--- /dev/null
+++ b/test/files/run/interpolationMultiline2.scala
@@ -0,0 +1,21 @@
+object Test extends App {
+
+ def test1(n: Int) = {
+ val old = "old"
+ try { println(s"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(s"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(f"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(f"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(f"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(f"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(s"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(s"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ }
+
+ test1(1)
+ println("===============")
+ test1(12)
+ println("===============")
+ test1(123)
+
+}
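
The "1%2d" lines in the .check file document the nesting rule: only the f interpolator consumes format directives, so an inner s"$n%2d" leaves the %2d as literal text, which the outer interpolator then passes through unchanged. Reduced to two hypothetical lines:

    object NestDemo extends App {
      val n = 1
      println(f"""Bob is ${f"$n%2d"} years old!""")  // inner f formats: " 1"
      println(f"""Bob is ${s"$n%2d"} years old!""")  // inner s keeps "%2d" literal
    }
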
diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check
new file mode 100644
index 0000000000..3461d1bf6b
--- /dev/null
+++ b/test/files/run/lub-visibility.check
@@ -0,0 +1,14 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> // should infer List[scala.collection.immutable.Seq[Nothing]]
+
+scala> // but reverted that for SI-5534.
+
+scala> val x = List(List(), Vector())
+x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq{def dropRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def takeRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def drop(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def take(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]}]; def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.Ab...
+scala>
+
+scala>
diff --git a/test/files/run/lub-visibility.scala b/test/files/run/lub-visibility.scala
new file mode 100644
index 0000000000..8d5d3ae11a
--- /dev/null
+++ b/test/files/run/lub-visibility.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+object Test extends ReplTest {
+ def code = """
+ |// should infer List[scala.collection.immutable.Seq[Nothing]]
+ |// but reverted that for SI-5534.
+ |val x = List(List(), Vector())
+ """.stripMargin
+}
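
The huge inferred type in the .check file is the structural least upper bound of List[Nothing] and Vector[Nothing]; the comment records that inferring the simpler scala.collection.immutable.Seq[Nothing] was tried and then reverted for SI-5534. The behavior itself fits in one line:

    object LubDemo extends App {
      val x = List(List(), Vector())  // element type: the structural LUB above
      println(x)                      // List(List(), Vector())
    }
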
diff --git a/test/files/run/macro-basic.check b/test/files/run/macro-basic.check
new file mode 100644
index 0000000000..d434014897
--- /dev/null
+++ b/test/files/run/macro-basic.check
@@ -0,0 +1 @@
+10
diff --git a/test/files/run/macro-basic.flags b/test/files/run/macro-basic.flags
new file mode 100644
index 0000000000..06a7b31f11
--- /dev/null
+++ b/test/files/run/macro-basic.flags
@@ -0,0 +1 @@
+-Xmacros
diff --git a/test/files/run/macro-basic/Macros_1.scala b/test/files/run/macro-basic/Macros_1.scala
new file mode 100644
index 0000000000..c2ea183abe
--- /dev/null
+++ b/test/files/run/macro-basic/Macros_1.scala
@@ -0,0 +1,10 @@
+object Macros {
+ object Shmacros {
+ def macro foo(x: Int): Int = x
+ }
+ def macro bar(x: Int): Int = x
+}
+
+class Macros {
+ def macro quux(x: Int): Int = x
+} \ No newline at end of file
diff --git a/test/files/run/macro-basic/Test_2.scala b/test/files/run/macro-basic/Test_2.scala
new file mode 100644
index 0000000000..e9a10e20c9
--- /dev/null
+++ b/test/files/run/macro-basic/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros.Shmacros._
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+} \ No newline at end of file
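
Each `def macro` in this early prototype syntax expands a call into its untouched argument, so the test evaluates 2 + 2 * 4 = 10, which is exactly the contents of macro-basic.check. As a plain, non-macro check of the arithmetic:

    object Expansion extends App { println(2 + 2 * 4) }  // 10
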
diff --git a/test/files/run/macro-repl-basic.check b/test/files/run/macro-repl-basic.check
new file mode 100644
index 0000000000..f8f0d3ad29
--- /dev/null
+++ b/test/files/run/macro-repl-basic.check
@@ -0,0 +1,25 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> object Macros {
+ object Shmacros {
+ def macro foo(x: Int): Int = x
+ }
+ def macro bar(x: Int): Int = x
+}; class Macros {
+ def macro quux(x: Int): Int = x
+}
+defined module Macros
+defined class Macros
+
+scala>
+
+scala> import Macros.Shmacros._
+import Macros.Shmacros._
+
+scala> println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+10
+
+scala>
diff --git a/test/files/run/macro-repl-basic.scala b/test/files/run/macro-repl-basic.scala
new file mode 100644
index 0000000000..9b1a53343b
--- /dev/null
+++ b/test/files/run/macro-repl-basic.scala
@@ -0,0 +1,18 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def extraSettings = "-Xmacros"
+ def code = """
+ |object Macros {
+ | object Shmacros {
+ | def macro foo(x: Int): Int = x
+ | }
+ | def macro bar(x: Int): Int = x
+ |}; class Macros {
+ | def macro quux(x: Int): Int = x
+ |}
+ |
+ |import Macros.Shmacros._
+ |println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+ |""".stripMargin
+} \ No newline at end of file
diff --git a/test/files/run/macro-repl-dontexpand.check b/test/files/run/macro-repl-dontexpand.check
new file mode 100644
index 0000000000..d2bb89b6f7
--- /dev/null
+++ b/test/files/run/macro-repl-dontexpand.check
@@ -0,0 +1,9 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> def macro foo = ???
+foo: Any
+
+scala>
diff --git a/test/files/run/macro-repl-dontexpand.scala b/test/files/run/macro-repl-dontexpand.scala
new file mode 100644
index 0000000000..254bce894c
--- /dev/null
+++ b/test/files/run/macro-repl-dontexpand.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def extraSettings = "-Xmacros"
+ def code = """
+ |def macro foo = ???
+ |""".stripMargin
+} \ No newline at end of file
diff --git a/test/files/run/macro-rettype-mismatch.check b/test/files/run/macro-rettype-mismatch.check
new file mode 100644
index 0000000000..f6e4bc09fd
--- /dev/null
+++ b/test/files/run/macro-rettype-mismatch.check
@@ -0,0 +1,5 @@
+error: type mismatch;
+ found : Int(2)
+ required: String
+
+java.lang.Error: reflective compilation has failed
diff --git a/test/files/run/macro-rettype-mismatch.flags b/test/files/run/macro-rettype-mismatch.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/run/macro-rettype-mismatch.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/run/macro-rettype-mismatch/Macros_1.scala b/test/files/run/macro-rettype-mismatch/Macros_1.scala
new file mode 100644
index 0000000000..64e5b93468
--- /dev/null
+++ b/test/files/run/macro-rettype-mismatch/Macros_1.scala
@@ -0,0 +1,3 @@
+object Macros {
+ def macro foo(x: Int): String = x
+} \ No newline at end of file
diff --git a/test/files/run/macro-rettype-mismatch/Test_2.scala b/test/files/run/macro-rettype-mismatch/Test_2.scala
new file mode 100644
index 0000000000..39a7c7ad1a
--- /dev/null
+++ b/test/files/run/macro-rettype-mismatch/Test_2.scala
@@ -0,0 +1,16 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ import scala.reflect.mirror._
+ val tree = Apply(Select(Ident("Macros"), newTermName("foo")), List(Literal(Constant(2))))
+
+ val stderr = new java.io.ByteArrayOutputStream()
+ Console.setErr(new java.io.PrintStream(stderr))
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ try { toolbox.runExpr(tree) }
+ catch { case ex: Throwable => println(stderr); println(ex) }
+}
diff --git a/test/files/run/manifests.scala b/test/files/run/manifests.scala
index 1da06b8aee..6b6ea80b34 100644
--- a/test/files/run/manifests.scala
+++ b/test/files/run/manifests.scala
@@ -46,11 +46,20 @@ object Test
def showsContravariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
new VarianceTester[T, U, CC](CONTRA) showsExpectedVariance
- def typeCompare[T, U](implicit ev1: Manifest[T], ev2: Manifest[U]) = (ev1 <:< ev2, ev2 <:< ev1) match {
- case (true, true) => SAME
- case (true, false) => SUB
- case (false, true) => SUPER
- case (false, false) => NONE
+ def typeCompare[T, U](implicit ev1: Manifest[T], ev2: Manifest[U]) = {
+ // checking types as well
+ if ((ev1 <:< ev2) != (ev1.tpe <:< ev2.tpe))
+ println("Failed! " + ((ev1, ev2)))
+
+ if ((ev2 <:< ev1) != (ev2.tpe <:< ev1.tpe))
+ println("Failed! " + ((ev2, ev1)))
+
+ (ev1 <:< ev2, ev2 <:< ev1) match {
+ case (true, true) => SAME
+ case (true, false) => SUB
+ case (false, true) => SUPER
+ case (false, false) => NONE
+ }
}
def assertAnyRef[T: Manifest] = List(
diff --git a/test/files/run/reflection-implClass.scala b/test/files/run/reflection-implClass.scala
index 2b30e29bb3..7718b52f33 100644
--- a/test/files/run/reflection-implClass.scala
+++ b/test/files/run/reflection-implClass.scala
@@ -8,19 +8,19 @@
object Test extends App with Outer {
import scala.reflect.mirror
- assert(mirror.classToSymbol(manifest[Foo].erasure).typeSig.declaration(mirror.newTermName("bar")).typeSig ==
- mirror.classToSymbol(manifest[Bar].erasure).typeSig.declaration(mirror.newTermName("foo")).typeSig)
+ assert(mirror.classToSymbol(manifest[Foo].erasure).typeSignature.declaration(mirror.newTermName("bar")).typeSignature ==
+ mirror.classToSymbol(manifest[Bar].erasure).typeSignature.declaration(mirror.newTermName("foo")).typeSignature)
val s1 = implClass(manifest[Foo].erasure)
assert(s1 != mirror.NoSymbol)
- assert(s1.typeSig != mirror.NoType)
- assert(s1.companionModule.typeSig != mirror.NoType)
- assert(s1.companionModule.typeSig.declaration(mirror.newTermName("bar")) != mirror.NoSymbol)
+ assert(s1.typeSignature != mirror.NoType)
+ assert(s1.companionSymbol.typeSignature != mirror.NoType)
+ assert(s1.companionSymbol.typeSignature.declaration(mirror.newTermName("bar")) != mirror.NoSymbol)
val s2 = implClass(manifest[Bar].erasure)
assert(s2 != mirror.NoSymbol)
- assert(s2.typeSig != mirror.NoType)
- assert(s2.companionModule.typeSig != mirror.NoType)
- assert(s2.companionModule.typeSig.declaration(mirror.newTermName("foo")) != mirror.NoSymbol)
+ assert(s2.typeSignature != mirror.NoType)
+ assert(s2.companionSymbol.typeSignature != mirror.NoType)
+ assert(s2.companionSymbol.typeSignature.declaration(mirror.newTermName("foo")) != mirror.NoSymbol)
def implClass(clazz: Class[_]) = {
val implClass = Class.forName(clazz.getName + "$class")
mirror.classToSymbol(implClass)
diff --git a/test/files/run/reify_ann1a.check b/test/files/run/reify_ann1a.check
new file mode 100644
index 0000000000..2822238706
--- /dev/null
+++ b/test/files/run/reify_ann1a.check
@@ -0,0 +1,30 @@
+{
+ @new ann(immutable.this.List.apply[String]("1a")) @new ann(immutable.this.List.apply[String]("1b")) class C[@new ann(immutable.this.List.apply[String]("2a")) @new ann(immutable.this.List.apply[String]("2b")) T>: Nothing <: Any] extends Object with ScalaObject {
+ @new ann(immutable.this.List.apply[String]("3a")) @new ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4a")) @ann(immutable.this.List.apply[String]("4b")) = _;
+ def <init>(@new ann(immutable.this.List.apply[String]("3a")) @new ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4a")) @ann(immutable.this.List.apply[String]("4b"))) = {
+ super.<init>();
+ ()
+ };
+ @new ann(immutable.this.List.apply[String]("5a")) @new ann(immutable.this.List.apply[String]("5b")) def f(x: Int @ann(immutable.this.List.apply[String]("6b")) @ann(immutable.this.List.apply[String]("6a"))): Int = {
+ @new ann(immutable.this.List.apply[String]("7a")) @new ann(immutable.this.List.apply[String]("7b")) val r: Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")) = ((x.$plus(3): Int @ann(immutable.this.List.apply[String]("8a"))): Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")));
+ val s: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")) = (4: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")));
+ r.$plus(s)
+ }
+ };
+ ()
+}
+{
+ @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T>: Nothing <: Any] extends Object with ScalaObject {
+ @ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a")) = _;
+ def <init>(@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a"))): C[T] = {
+ C.super.<init>();
+ ()
+ };
+ @ann(immutable.this.List.apply[String]("5a")) @ann(immutable.this.List.apply[String]("5b")) def f(x: Int @ann(immutable.this.List.apply[String]("6b")) @ann(immutable.this.List.apply[String]("6a"))): Int = {
+ @ann(immutable.this.List.apply[String]("7a")) @ann(immutable.this.List.apply[String]("7b")) val r: Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")) = ((x.+(3): Int @ann(immutable.this.List.apply[String]("8a"))): Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")));
+ val s: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")) = (4: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")));
+ r.+(s)
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann1a.scala b/test/files/run/reify_ann1a.scala
new file mode 100644
index 0000000000..1ca170904b
--- /dev/null
+++ b/test/files/run/reify_ann1a.scala
@@ -0,0 +1,30 @@
+import scala.reflect._
+import scala.reflect.api._
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+class ann(bar: List[String]) extends StaticAnnotation
+
+object Test extends App {
+ // test 1: reify
+ val tree = scala.reflect.Code.lift{
+ @ann(bar=List("1a")) @ann(bar=List("1b")) class C[@ann(bar=List("2a")) @ann(bar=List("2b")) T](@ann(bar=List("3a")) @ann(bar=List("3b")) x: T @ann(bar=List("4a")) @ann(bar=List("4b"))) {
+ @ann(bar=List("5a")) @ann(bar=List("5b")) def f(x: Int @ann(bar=List("6a")) @ann(bar=List("6b"))) = {
+ @ann(bar=List("7a")) @ann(bar=List("7b")) val r = (x + 3): @ann(bar=List("8a")) @ann(bar=List("8b"))
+ val s = 4: Int @ann(bar=List("9a")) @ann(bar=List("9b"))
+ r + s
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.runExpr(tree)
+} \ No newline at end of file
diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check
new file mode 100644
index 0000000000..e240e1e0ce
--- /dev/null
+++ b/test/files/run/reify_ann1b.check
@@ -0,0 +1,30 @@
+{
+ @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T>: Nothing <: Any] extends Object with ScalaObject {
+ @new ann(bar = "3a") @new ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _;
+ def <init>(@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = {
+ super.<init>();
+ ()
+ };
+ @new ann(bar = "5a") @new ann(bar = "5b") def f(x: Int @ann(bar = "6b") @ann(bar = "6a")): Int = {
+ @new ann(bar = "7a") @new ann(bar = "7b") val r: Int @ann(bar = "8b") @ann(bar = "8a") = ((x.$plus(3): Int @ann(bar = "8a")): Int @ann(bar = "8b") @ann(bar = "8a"));
+ val s: Int @ann(bar = "9b") @ann(bar = "9a") = (4: Int @ann(bar = "9b") @ann(bar = "9a"));
+ r.$plus(s)
+ }
+ };
+ ()
+}
+{
+ @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T>: Nothing <: Any] extends Object with ScalaObject {
+ @ann(bar = "3a") @ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4b") @ann(bar = "4a") = _;
+ def <init>(@ann(bar = "3a") @ann(bar = "3b") x: T @ann(bar = "4b") @ann(bar = "4a")): C[T] = {
+ C.super.<init>();
+ ()
+ };
+ @ann(bar = "5a") @ann(bar = "5b") def f(x: Int @ann(bar = "6b") @ann(bar = "6a")): Int = {
+ @ann(bar = "7a") @ann(bar = "7b") val r: Int @ann(bar = "8b") @ann(bar = "8a") = ((x.+(3): Int @ann(bar = "8a")): Int @ann(bar = "8b") @ann(bar = "8a"));
+ val s: Int @ann(bar = "9b") @ann(bar = "9a") = (4: Int @ann(bar = "9b") @ann(bar = "9a"));
+ r.+(s)
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann1b.scala b/test/files/run/reify_ann1b.scala
new file mode 100644
index 0000000000..9bdc712227
--- /dev/null
+++ b/test/files/run/reify_ann1b.scala
@@ -0,0 +1,30 @@
+import scala.reflect._
+import scala.reflect.api._
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+class ann(bar: String) extends ClassfileAnnotation
+
+object Test extends App {
+ // test 1: reify
+ val tree = scala.reflect.Code.lift{
+ @ann(bar="1a") @ann(bar="1b") class C[@ann(bar="2a") @ann(bar="2b") T](@ann(bar="3a") @ann(bar="3b") x: T @ann(bar="4a") @ann(bar="4b")) {
+ @ann(bar="5a") @ann(bar="5b") def f(x: Int @ann(bar="6a") @ann(bar="6b")) = {
+ @ann(bar="7a") @ann(bar="7b") val r = (x + 3): @ann(bar="8a") @ann(bar="8b")
+ val s = 4: Int @ann(bar="9a") @ann(bar="9b")
+ r + s
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.runExpr(tree)
+} \ No newline at end of file
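The only material difference from reify_ann1a is the annotation's base type: ClassfileAnnotation instead of StaticAnnotation. That is why the arguments change shape — classfile (Java-style) annotations take only constant arguments, and the printer renders them in named form (@ann(bar = "1a")) rather than as a List. Side by side, with illustrative names ann1/ann2:

    // reify_ann1a style: a Scala static annotation, arbitrary argument types.
    class ann1(bar: List[String]) extends StaticAnnotation

    // reify_ann1b style: a classfile annotation, constant arguments only,
    // printed with named arguments in the reified trees above.
    class ann2(bar: String) extends ClassfileAnnotation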
diff --git a/test/files/run/reify_anonymous.scala b/test/files/run/reify_anonymous.scala
index 1e7f3fe856..af16f2f8fd 100644
--- a/test/files/run/reify_anonymous.scala
+++ b/test/files/run/reify_anonymous.scala
@@ -9,6 +9,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
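This same simplification recurs through the reify tests that follow: the explicit toolbox.typeCheck step is dropped and the raw tree is handed straight to runExpr, which evidently typechecks untyped trees itself now. A minimal sketch of the new calling convention (RunExprDemo is an invented name; the imports are the ones these tests use):

    import scala.tools.nsc.reporters._
    import scala.tools.nsc.Settings
    import reflect.runtime.Mirror.ToolBox

    object RunExprDemo extends App {
      val code = scala.reflect.Code.lift{ println(2 + 2) }
      val toolbox = new ToolBox(new ConsoleReporter(new Settings))
      // No toolbox.typeCheck(code.tree) step: runExpr accepts the untyped
      // tree and typechecks it internally before evaluation.
      toolbox.runExpr(code.tree)
    }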
diff --git a/test/files/run/reify_classfileann_a.check b/test/files/run/reify_classfileann_a.check
new file mode 100644
index 0000000000..1773263a94
--- /dev/null
+++ b/test/files/run/reify_classfileann_a.check
@@ -0,0 +1,18 @@
+{
+ @new ann(bar = "1", quux = Array("2", "3"), baz = new ann(bar = "4")) class C extends Object with ScalaObject {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
+{
+ @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")) class C extends Object with ScalaObject {
+ def <init>(): C = {
+ C.super.<init>();
+ ()
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_classfileann_a.scala b/test/files/run/reify_classfileann_a.scala
new file mode 100644
index 0000000000..c77bd3b8a2
--- /dev/null
+++ b/test/files/run/reify_classfileann_a.scala
@@ -0,0 +1,24 @@
+import scala.reflect._
+import scala.reflect.api._
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends ClassfileAnnotation
+
+object Test extends App {
+ // test 1: reify
+ val tree = scala.reflect.Code.lift{
+ @ann(bar="1", quux=Array("2", "3"), baz = new ann(bar = "4")) class C
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.runExpr(tree)
+} \ No newline at end of file
diff --git a/test/files/run/reify_closure1.scala b/test/files/run/reify_closure1.scala
index 825a38dc1d..7cb3aff17d 100644
--- a/test/files/run/reify_closure1.scala
+++ b/test/files/run/reify_closure1.scala
@@ -4,14 +4,13 @@ import reflect.runtime.Mirror.ToolBox
object Test extends App {
def foo[T](ys: List[T]): Int => Int = {
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
x
- }
+ }}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(fun.tree)
dyn.asInstanceOf[Int => Int]
}
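The closure tests also switch reification styles: the old implicit view that let a bare function literal be ascribed to reflect.Code[Int => Int] is gone, replaced by an explicit reflect.Code.lift call around a function with an annotated parameter type. A sketch under those assumptions (LiftDemo is an invented name):

    import scala.tools.nsc.reporters._
    import scala.tools.nsc.Settings
    import reflect.runtime.Mirror.ToolBox

    object LiftDemo extends App {
      // Explicit lift replaces: val fun: reflect.Code[Int => Int] = x => x + 1
      val fun = scala.reflect.Code.lift{ (x: Int) => x + 1 }
      val toolbox = new ToolBox(new ConsoleReporter(new Settings))
      val f = toolbox.runExpr(fun.tree).asInstanceOf[Int => Int]
      println(f(41)) // 42
    }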
diff --git a/test/files/run/reify_closure2a.scala b/test/files/run/reify_closure2a.scala
index b88bec005d..cf367aa63f 100644
--- a/test/files/run/reify_closure2a.scala
+++ b/test/files/run/reify_closure2a.scala
@@ -4,14 +4,13 @@ import reflect.runtime.Mirror.ToolBox
object Test extends App {
def foo(y: Int): Int => Int = {
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
x + y
- }
+ }}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure3a.scala b/test/files/run/reify_closure3a.scala
index 6414fa58a3..d322b970b6 100644
--- a/test/files/run/reify_closure3a.scala
+++ b/test/files/run/reify_closure3a.scala
@@ -6,14 +6,13 @@ object Test extends App {
def foo(y: Int): Int => Int = {
def y1 = y
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
x + y1
- }
+ }}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure4a.scala b/test/files/run/reify_closure4a.scala
index 99e9d82706..bbedd7e092 100644
--- a/test/files/run/reify_closure4a.scala
+++ b/test/files/run/reify_closure4a.scala
@@ -6,14 +6,13 @@ object Test extends App {
def foo(y: Int): Int => Int = {
val y1 = y
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
x + y1
- }
+ }}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure5a.scala b/test/files/run/reify_closure5a.scala
index 0ac53d5479..193e18103a 100644
--- a/test/files/run/reify_closure5a.scala
+++ b/test/files/run/reify_closure5a.scala
@@ -4,14 +4,13 @@ import reflect.runtime.Mirror.ToolBox
object Test extends App {
def foo[T](ys: List[T]): Int => Int = {
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
x + ys.length
- }
+ }}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure6.scala b/test/files/run/reify_closure6.scala
index 54f1791bf2..6aff83cb94 100644
--- a/test/files/run/reify_closure6.scala
+++ b/test/files/run/reify_closure6.scala
@@ -7,18 +7,17 @@ object Test extends App {
def foo[T](ys: List[T]): Int => Int = {
val z = 1
var y = 0
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
y += 1
q += 1
println("q = " + q)
println("y = " + y)
x + ys.length * z + q + y
- }
+ }}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure7.scala b/test/files/run/reify_closure7.scala
index 8933df23fa..46002d8d6c 100644
--- a/test/files/run/reify_closure7.scala
+++ b/test/files/run/reify_closure7.scala
@@ -8,19 +8,18 @@ object Test extends App {
def foo[T](ys: List[T]): Int => Int = {
val z = 1
var y = 0
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
y += 1
q += 1
println("q = " + q)
println("y = " + y)
x + ys.length * z + q + y
- }
+ }}
if (clo == null) {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(fun.tree)
clo = dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure8a.scala b/test/files/run/reify_closure8a.scala
index 5e54bfc8c7..805d8ff855 100644
--- a/test/files/run/reify_closure8a.scala
+++ b/test/files/run/reify_closure8a.scala
@@ -10,8 +10,7 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(new Foo(10).fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(new Foo(10).fun.tree)
val foo = dyn.asInstanceOf[Int]
println(foo)
}
diff --git a/test/files/run/reify_closures10.scala b/test/files/run/reify_closures10.scala
index d0f895ae4d..b6ec8e8911 100644
--- a/test/files/run/reify_closures10.scala
+++ b/test/files/run/reify_closures10.scala
@@ -10,6 +10,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- println(toolbox.runExpr(ttree))
+ println(toolbox.runExpr(code.tree))
}
diff --git a/test/files/run/reify_complex.scala b/test/files/run/reify_complex.scala
index aae4d558cf..0d9aeb28c5 100644
--- a/test/files/run/reify_complex.scala
+++ b/test/files/run/reify_complex.scala
@@ -26,6 +26,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_extendbuiltins.scala b/test/files/run/reify_extendbuiltins.scala
index 57acd699ff..0aaec7cdf2 100644
--- a/test/files/run/reify_extendbuiltins.scala
+++ b/test/files/run/reify_extendbuiltins.scala
@@ -16,6 +16,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_for1.scala b/test/files/run/reify_for1.scala
index 4b03330293..d1b60d878b 100644
--- a/test/files/run/reify_for1.scala
+++ b/test/files/run/reify_for1.scala
@@ -11,6 +11,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/reify_fors.check b/test/files/run/reify_fors.check
index eefddedc20..eefddedc20 100644
--- a/test/pending/run/reify_fors.check
+++ b/test/files/run/reify_fors.check
diff --git a/test/pending/run/reify_fors.scala b/test/files/run/reify_fors.scala
index f3556514a9..27ee85d18b 100644
--- a/test/pending/run/reify_fors.scala
+++ b/test/files/run/reify_fors.scala
@@ -23,7 +23,7 @@ object Test extends App {
/** Return an iterator over persons that are older than 20.
*/
def olderThan20(xs: Seq[Person]): Iterator[String] =
- olderThan20(xs.elements)
+ olderThan20(xs.iterator)
/** Return an iterator over persons older than 20, given
* an iterator over persons.
@@ -102,6 +102,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_generic.scala b/test/files/run/reify_generic.scala
index aef038b2d8..6a4ff148c4 100644
--- a/test/files/run/reify_generic.scala
+++ b/test/files/run/reify_generic.scala
@@ -10,6 +10,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_generic2.scala b/test/files/run/reify_generic2.scala
index d03fe7602b..9413f41eb5 100644
--- a/test/files/run/reify_generic2.scala
+++ b/test/files/run/reify_generic2.scala
@@ -11,6 +11,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_getter.scala b/test/files/run/reify_getter.scala
index 83eaded506..33f36888a7 100644
--- a/test/files/run/reify_getter.scala
+++ b/test/files/run/reify_getter.scala
@@ -13,7 +13,6 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- val evaluated = toolbox.runExpr(ttree)
+ val evaluated = toolbox.runExpr(code.tree)
println("evaluated = " + evaluated)
}
diff --git a/test/files/run/reify_implicits.scala b/test/files/run/reify_implicits.scala
index a15cef9c97..953eabe6c2 100644
--- a/test/files/run/reify_implicits.scala
+++ b/test/files/run/reify_implicits.scala
@@ -16,6 +16,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_inheritance.scala b/test/files/run/reify_inheritance.scala
index 2a1b5f764f..78a64c264e 100644
--- a/test/files/run/reify_inheritance.scala
+++ b/test/files/run/reify_inheritance.scala
@@ -18,6 +18,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_inner1.check b/test/files/run/reify_inner1.check
new file mode 100644
index 0000000000..d8263ee986
--- /dev/null
+++ b/test/files/run/reify_inner1.check
@@ -0,0 +1 @@
+2 \ No newline at end of file
diff --git a/test/files/run/reify_inner1.scala b/test/files/run/reify_inner1.scala
new file mode 100644
index 0000000000..546fe36d16
--- /dev/null
+++ b/test/files/run/reify_inner1.scala
@@ -0,0 +1,21 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ class C {
+ class D {
+ val x = 2
+ }
+ }
+
+ val outer = new C()
+ val inner = new outer.D()
+ println(inner.x)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/files/run/reify_inner2.check b/test/files/run/reify_inner2.check
new file mode 100644
index 0000000000..d8263ee986
--- /dev/null
+++ b/test/files/run/reify_inner2.check
@@ -0,0 +1 @@
+2 \ No newline at end of file
diff --git a/test/files/run/reify_inner2.scala b/test/files/run/reify_inner2.scala
new file mode 100644
index 0000000000..613614b989
--- /dev/null
+++ b/test/files/run/reify_inner2.scala
@@ -0,0 +1,21 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ class C {
+ object D {
+ val x = 2
+ }
+ }
+
+ val outer = new C()
+ val inner = outer.D
+ println(inner.x)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/files/run/reify_inner3.check b/test/files/run/reify_inner3.check
new file mode 100644
index 0000000000..d8263ee986
--- /dev/null
+++ b/test/files/run/reify_inner3.check
@@ -0,0 +1 @@
+2 \ No newline at end of file
diff --git a/test/files/run/reify_inner3.scala b/test/files/run/reify_inner3.scala
new file mode 100644
index 0000000000..e9fb636dce
--- /dev/null
+++ b/test/files/run/reify_inner3.scala
@@ -0,0 +1,21 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ object C {
+ class D {
+ val x = 2
+ }
+ }
+
+ val outer = C
+ val inner = new outer.D
+ println(inner.x)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/files/run/reify_inner4.check b/test/files/run/reify_inner4.check
new file mode 100644
index 0000000000..d8263ee986
--- /dev/null
+++ b/test/files/run/reify_inner4.check
@@ -0,0 +1 @@
+2 \ No newline at end of file
diff --git a/test/files/run/reify_inner4.scala b/test/files/run/reify_inner4.scala
new file mode 100644
index 0000000000..33870b0983
--- /dev/null
+++ b/test/files/run/reify_inner4.scala
@@ -0,0 +1,21 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ object C {
+ object D {
+ val x = 2
+ }
+ }
+
+ val outer = C
+ val inner = outer.D
+ println(inner.x)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/pending/run/reify_maps.check b/test/files/run/reify_maps.check
index 08cbbb1359..08cbbb1359 100644
--- a/test/pending/run/reify_maps.check
+++ b/test/files/run/reify_maps.check
diff --git a/test/pending/run/reify_maps.scala b/test/files/run/reify_maps.scala
index 589b28d049..d3d95ffa24 100644
--- a/test/pending/run/reify_maps.scala
+++ b/test/files/run/reify_maps.scala
@@ -21,6 +21,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala
index 30901b98c2..cd6052bc5e 100644
--- a/test/files/run/reify_printf.scala
+++ b/test/files/run/reify_printf.scala
@@ -14,11 +14,10 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter, args mkString " ")
- val ttree = toolbox.typeCheck(tree)
val output = new ByteArrayOutputStream()
Console.setOut(new PrintStream(output))
- val evaluated = toolbox.runExpr(ttree)
+ val evaluated = toolbox.runExpr(tree)
assert(output.toString() == "hello world", output.toString() +" == hello world")
diff --git a/test/files/run/reify_sort.scala b/test/files/run/reify_sort.scala
index 42991fe5d2..5984a64967 100644
--- a/test/files/run/reify_sort.scala
+++ b/test/files/run/reify_sort.scala
@@ -52,6 +52,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_sort1.scala b/test/files/run/reify_sort1.scala
index 42f4c824a5..6f365dea26 100644
--- a/test/files/run/reify_sort1.scala
+++ b/test/files/run/reify_sort1.scala
@@ -22,6 +22,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_this.scala b/test/files/run/reify_this.scala
index 38ef72b6eb..ee1f116013 100644
--- a/test/files/run/reify_this.scala
+++ b/test/files/run/reify_this.scala
@@ -5,14 +5,13 @@ import scala.tools.nsc.Settings
import reflect.runtime.Mirror.ToolBox
trait Eval {
- def eval(code: Code[_]): Any = eval(code.tree)
+ def eval(code: Code): Any = eval(code.tree)
def eval(tree: Tree): Any = {
val settings = new Settings
val reporter = new ConsoleReporter(settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(tree)
}
}
diff --git a/test/pending/run/reify_timeofday.check b/test/files/run/reify_timeofday.check
index 3fd3b76a62..3fd3b76a62 100644
--- a/test/pending/run/reify_timeofday.check
+++ b/test/files/run/reify_timeofday.check
diff --git a/test/pending/run/reify_timeofday.scala b/test/files/run/reify_timeofday.scala
index 6bd11b0d30..122d7a6d52 100644
--- a/test/pending/run/reify_timeofday.scala
+++ b/test/files/run/reify_timeofday.scala
@@ -43,6 +43,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/reify_varargs.scala b/test/files/run/reify_varargs.scala
index d38cbf2aac..175cfb5db0 100644
--- a/test/files/run/reify_varargs.scala
+++ b/test/files/run/reify_varargs.scala
@@ -12,6 +12,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/spec-nlreturn.check b/test/files/run/spec-nlreturn.check
new file mode 100644
index 0000000000..26cff07360
--- /dev/null
+++ b/test/files/run/spec-nlreturn.check
@@ -0,0 +1,2 @@
+scala.runtime.NonLocalReturnControl$mcI$sp
+16
diff --git a/test/files/run/spec-nlreturn.scala b/test/files/run/spec-nlreturn.scala
new file mode 100644
index 0000000000..ec5e7229d3
--- /dev/null
+++ b/test/files/run/spec-nlreturn.scala
@@ -0,0 +1,16 @@
+object Test {
+ def f(): Int = {
+ try {
+ val g = 1 to 10 map { i => return 16 ; i } sum;
+ g
+ }
+ catch { case x: runtime.NonLocalReturnControl[_] =>
+ println(x.getClass.getName)
+ x.value.asInstanceOf[Int]
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f())
+ }
+}
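spec-nlreturn pins down an implementation detail: a return inside a closure does not return from the closure itself but throws scala.runtime.NonLocalReturnControl, which unwinds to the enclosing method — and under specialization the Int-carrying variant ($mcI$sp in the check file) is thrown. A hedged stand-alone illustration (NlReturnDemo is an invented name):

    object NlReturnDemo {
      def first(xs: List[Int]): Int = {
        // `return x` here compiles to a thrown NonLocalReturnControl that
        // the enclosing method catches to produce its result.
        xs foreach { x => return x }
        -1
      }
      def main(args: Array[String]): Unit =
        println(first(List(16, 17))) // prints 16
    }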
diff --git a/test/files/run/synchronized.check b/test/files/run/synchronized.check
new file mode 100644
index 0000000000..dd9f4ef424
--- /dev/null
+++ b/test/files/run/synchronized.check
@@ -0,0 +1,128 @@
+ .|. c1.f1: OK
+ .|. c1.fi: OK
+ .|... c1.fv: OK
+ .|... c1.ff: OK
+ .|. c1.fl: OK
+ .|... c1.fo: OK
+ |.. c1.g1: OK
+ |.. c1.gi: OK
+ |.... c1.gv: OK
+ |..... c1.gf: OK
+ .|... c1.c.f1: OK
+ .|... c1.c.fi: OK
+ .|..... c1.c.fv: OK
+ .|..... c1.c.ff: OK
+ .|... c1.c.fl: OK
+ .|..... c1.c.fo: OK
+ .|... c1.c.fn: OK
+ |.... c1.c.g1: OK
+ |.... c1.c.gi: OK
+ |...... c1.c.gv: OK
+ |...... c1.c.gf: OK
+ .|... c1.O.f1: OK
+ .|... c1.O.fi: OK
+ .|..... c1.O.fv: OK
+ .|..... c1.O.ff: OK
+ .|... c1.O.fl: OK
+ .|..... c1.O.fo: OK
+ .|... c1.O.fn: OK
+ |.... c1.O.g1: OK
+ |.... c1.O.gi: OK
+ |...... c1.O.gv: OK
+ |...... c1.O.gf: OK
+ .|. O1.f1: OK
+ .|. O1.fi: OK
+ .|... O1.fv: OK
+ .|... O1.ff: OK
+ .|. O1.fl: OK
+ .|... O1.fo: OK
+ |.. O1.g1: OK
+ |.. O1.gi: OK
+ |.... O1.gv: OK
+ |.... O1.gf: OK
+ .|... O1.c.f1: OK
+ .|... O1.c.fi: OK
+ .|..... O1.c.fv: OK
+ .|..... O1.c.ff: OK
+ .|... O1.c.fl: OK
+ .|..... O1.c.fo: OK
+ .|... O1.c.fn: OK
+ |.... O1.c.g1: OK
+ |.... O1.c.gi: OK
+ |...... O1.c.gv: OK
+ |...... O1.c.gf: OK
+ .|... O1.O.f1: OK
+ .|... O1.O.fi: OK
+ .|..... O1.O.fv: OK
+ .|..... O1.O.ff: OK
+ .|... O1.O.fl: OK
+ .|..... O1.O.fo: OK
+ .|... O1.O.fn: OK
+ |.... O1.O.g1: OK
+ |.... O1.O.gi: OK
+ |...... O1.O.gv: OK
+ |...... O1.O.gf: OK
+ .|..... c2.f1: OK
+ .|..... c2.fi: OK
+ .|....... c2.fv: OK
+ .|....... c2.ff: OK
+ .|..... c2.fl: OK
+ .|....... c2.fo: OK
+ |....... c2.g1: OK
+ |....... c2.gi: OK
+ |......... c2.gv: OK
+ |......... c2.gf: OK
+ .|........ c2.c.f1: OK
+ .|........ c2.c.fi: OK
+ .|.......... c2.c.fv: OK
+ .|.......... c2.c.ff: OK
+ .|........ c2.c.fl: OK
+ .|.......... c2.c.fo: OK
+ .|....... c2.c.fn: OK
+ |......... c2.c.g1: OK
+ |......... c2.c.gi: OK
+ |........... c2.c.gv: OK
+ |........... c2.c.gf: OK
+ .|........ c2.O.f1: OK
+ .|........ c2.O.fi: OK
+ .|.......... c2.O.fv: OK
+ .|.......... c2.O.ff: OK
+ .|........ c2.O.fl: OK
+ .|.......... c2.O.fo: OK
+ .|....... c2.O.fn: OK
+ |......... c2.O.g1: OK
+ |......... c2.O.gi: OK
+ |........... c2.O.gv: OK
+ |........... c2.O.gf: OK
+ .|..... O2.f1: OK
+ .|..... O2.fi: OK
+ .|....... O2.fv: OK
+ .|....... O2.ff: OK
+ .|..... O2.fl: OK
+ .|....... O2.fo: OK
+ |....... O2.g1: OK
+ |....... O2.gi: OK
+ |......... O2.gv: OK
+ |......... O2.gf: OK
+ .|........ O2.c.f1: OK
+ .|........ O2.c.fi: OK
+ .|.......... O2.c.fv: OK
+ .|.......... O2.c.ff: OK
+ .|........ O2.c.fl: OK
+ .|.......... O2.c.fo: OK
+ .|....... O2.c.fn: OK
+ |......... O2.c.g1: OK
+ |......... O2.c.gi: OK
+ |........... O2.c.gv: OK
+ |........... O2.c.gf: OK
+ .|........ O2.O.f1: OK
+ .|........ O2.O.fi: OK
+ .|.......... O2.O.fv: OK
+ .|.......... O2.O.ff: OK
+ .|........ O2.O.fl: OK
+ .|.......... O2.O.fo: OK
+ .|....... O2.O.fn: OK
+ |......... O2.O.g1: OK
+ |......... O2.O.gi: OK
+ |........... O2.O.gv: OK
+ |........... O2.O.gf: OK
diff --git a/test/files/run/synchronized.flags b/test/files/run/synchronized.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/run/synchronized.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/run/synchronized.scala b/test/files/run/synchronized.scala
new file mode 100644
index 0000000000..1f0e32992b
--- /dev/null
+++ b/test/files/run/synchronized.scala
@@ -0,0 +1,449 @@
+import java.lang.Thread.holdsLock
+import scala.collection.mutable.StringBuilder
+
+object Util {
+ def checkLocks(held: AnyRef*)(notHeld: AnyRef*) = {
+ val sb = new StringBuilder
+ for (lock <- held) {
+ sb.append(if (holdsLock(lock)) '.' else '!')
+ }
+ print("%5s|" format sb)
+
+ sb.clear()
+ for (lock <- notHeld) {
+ sb.append(if (holdsLock(lock)) '!' else '.')
+ }
+ print("%-15s " format sb)
+
+ (held forall holdsLock) && !(notHeld exists holdsLock)
+ }
+}
+
+class C1 {
+ import Util._
+
+ val lock = new AnyRef
+
+ def f1 = synchronized { checkLocks(this)(this.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(C1.this, gfv, gfv.getClass, lock, lock.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass)
+ glv
+ }
+
+ class C {
+ def f1 = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass, fv, fv.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, C1.this, C1.this.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) }
+ def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, C1.this, C1.this.getClass, gv, gv.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, C1.this, C1.this.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ glv
+ }
+ }
+ val c = new C
+
+ object O {
+ def f1 = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, C1.this, C1.this.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, C1.this, C1.this.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) }
+ def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, C1.this, C1.this.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, C1.this, C1.this.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ glv
+ }
+ }
+}
+
+object O1 {
+ import Util._
+
+ val lock = new AnyRef
+
+ def f1 = synchronized { checkLocks(this)(this.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass)
+ glv
+ }
+
+ class C {
+ def f1 = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, O1, O1.getClass, fv, fv.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, O1, O1.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) }
+ def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, O1, O1.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, O1, O1.getClass, gv, gv.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, O1, O1.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, O1, O1.getClass)
+ glv
+ }
+ }
+ val c = new C
+
+ object O {
+ def f1 = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, O1, O1.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, O1, O1.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) }
+ def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, O1, O1.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, O1, O1.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, O1, O1.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, O1, O1.getClass)
+ glv
+ }
+ }
+}
+
+trait T {
+ import Util._
+
+ val Tclass = Class.forName("T$class")
+
+ val lock = new AnyRef
+
+ def f1 = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass)
+ glv
+ }
+
+ class C {
+ def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, fv, fv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, T.this, T.this.getClass, gv, gv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ glv
+ }
+ }
+ val c = new C
+
+ object O {
+ def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ glv
+ }
+ }
+}
+
+class C2 extends T
+object O2 extends T
+
+object Test extends App {
+ def check(name: String, result: Boolean) {
+ println("%-10s %s" format (name +":", if (result) "OK" else "FAILED"))
+ }
+
+ val c1 = new C1
+ check("c1.f1", c1.f1)
+ check("c1.fi", c1.fi)
+ check("c1.fv", c1.fv())
+ check("c1.ff", c1.ff)
+ check("c1.fl", c1.fl)
+ check("c1.fo", c1.fo)
+ check("c1.g1", c1.g1)
+ check("c1.gi", c1.gi)
+ check("c1.gv", c1.gv())
+ check("c1.gf", c1.gf)
+// check("c1.gl", c1.gl) // FIXME *.gl are failing because of the issue described in SUGGEST-11
+
+ check("c1.c.f1", c1.c.f1)
+ check("c1.c.fi", c1.c.fi)
+ check("c1.c.fv", c1.c.fv())
+ check("c1.c.ff", c1.c.ff)
+ check("c1.c.fl", c1.c.fl)
+ check("c1.c.fo", c1.c.fo)
+ check("c1.c.fn", c1.c.fn)
+ check("c1.c.g1", c1.c.g1)
+ check("c1.c.gi", c1.c.gi)
+ check("c1.c.gv", c1.c.gv())
+ check("c1.c.gf", c1.c.gf)
+// check("c1.c.gl", c1.c.gl)
+
+ check("c1.O.f1", c1.O.f1)
+ check("c1.O.fi", c1.O.fi)
+ check("c1.O.fv", c1.O.fv())
+ check("c1.O.ff", c1.O.ff)
+ check("c1.O.fl", c1.O.fl)
+ check("c1.O.fo", c1.O.fo)
+ check("c1.O.fn", c1.O.fn)
+ check("c1.O.g1", c1.O.g1)
+ check("c1.O.gi", c1.O.gi)
+ check("c1.O.gv", c1.O.gv())
+ check("c1.O.gf", c1.O.gf)
+// check("c1.O.gl", c1.O.gl)
+
+ check("O1.f1", O1.f1)
+ check("O1.fi", O1.fi)
+ check("O1.fv", O1.fv())
+ check("O1.ff", O1.ff)
+ check("O1.fl", O1.fl)
+ check("O1.fo", O1.fo)
+ check("O1.g1", O1.g1)
+ check("O1.gi", O1.gi)
+ check("O1.gv", O1.gv())
+ check("O1.gf", O1.gf)
+// check("O1.gl", O1.gl)
+
+ check("O1.c.f1", O1.c.f1)
+ check("O1.c.fi", O1.c.fi)
+ check("O1.c.fv", O1.c.fv())
+ check("O1.c.ff", O1.c.ff)
+ check("O1.c.fl", O1.c.fl)
+ check("O1.c.fo", O1.c.fo)
+ check("O1.c.fn", O1.c.fn)
+ check("O1.c.g1", O1.c.g1)
+ check("O1.c.gi", O1.c.gi)
+ check("O1.c.gv", O1.c.gv())
+ check("O1.c.gf", O1.c.gf)
+// check("O1.c.gl", O1.c.gl)
+
+ check("O1.O.f1", O1.O.f1)
+ check("O1.O.fi", O1.O.fi)
+ check("O1.O.fv", O1.O.fv())
+ check("O1.O.ff", O1.O.ff)
+ check("O1.O.fl", O1.O.fl)
+ check("O1.O.fo", O1.O.fo)
+ check("O1.O.fn", O1.O.fn)
+ check("O1.O.g1", O1.O.g1)
+ check("O1.O.gi", O1.O.gi)
+ check("O1.O.gv", O1.O.gv())
+ check("O1.O.gf", O1.O.gf)
+// check("O1.O.gl", O1.O.gl)
+
+ val c2 = new C2
+ check("c2.f1", c2.f1)
+ check("c2.fi", c2.fi)
+ check("c2.fv", c2.fv())
+ check("c2.ff", c2.ff)
+ check("c2.fl", c2.fl)
+ check("c2.fo", c2.fo)
+ check("c2.g1", c2.g1)
+ check("c2.gi", c2.gi)
+ check("c2.gv", c2.gv())
+ check("c2.gf", c2.gf)
+// check("c2.gl", c2.gl)
+
+ check("c2.c.f1", c2.c.f1)
+ check("c2.c.fi", c2.c.fi)
+ check("c2.c.fv", c2.c.fv())
+ check("c2.c.ff", c2.c.ff)
+ check("c2.c.fl", c2.c.fl)
+ check("c2.c.fo", c2.c.fo)
+ check("c2.c.fn", c2.c.fn)
+ check("c2.c.g1", c2.c.g1)
+ check("c2.c.gi", c2.c.gi)
+ check("c2.c.gv", c2.c.gv())
+ check("c2.c.gf", c2.c.gf)
+// check("c2.c.gl", c2.c.gl)
+
+ check("c2.O.f1", c2.O.f1)
+ check("c2.O.fi", c2.O.fi)
+ check("c2.O.fv", c2.O.fv())
+ check("c2.O.ff", c2.O.ff)
+ check("c2.O.fl", c2.O.fl)
+ check("c2.O.fo", c2.O.fo)
+ check("c2.O.fn", c2.O.fn)
+ check("c2.O.g1", c2.O.g1)
+ check("c2.O.gi", c2.O.gi)
+ check("c2.O.gv", c2.O.gv())
+ check("c2.O.gf", c2.O.gf)
+// check("c2.O.gl", c2.O.gl)
+
+ check("O2.f1", O2.f1)
+ check("O2.fi", O2.fi)
+ check("O2.fv", O2.fv())
+ check("O2.ff", O2.ff)
+ check("O2.fl", O2.fl)
+ check("O2.fo", O2.fo)
+ check("O2.g1", O2.g1)
+ check("O2.gi", O2.gi)
+ check("O2.gv", O2.gv())
+ check("O2.gf", O2.gf)
+// check("O2.gl", O2.gl)
+
+ check("O2.c.f1", O2.c.f1)
+ check("O2.c.fi", O2.c.fi)
+ check("O2.c.fv", O2.c.fv())
+ check("O2.c.ff", O2.c.ff)
+ check("O2.c.fl", O2.c.fl)
+ check("O2.c.fo", O2.c.fo)
+ check("O2.c.fn", O2.c.fn)
+ check("O2.c.g1", O2.c.g1)
+ check("O2.c.gi", O2.c.gi)
+ check("O2.c.gv", O2.c.gv())
+ check("O2.c.gf", O2.c.gf)
+// check("O2.c.gl", O2.c.gl)
+
+ check("O2.O.f1", O2.O.f1)
+ check("O2.O.fi", O2.O.fi)
+ check("O2.O.fv", O2.O.fv())
+ check("O2.O.ff", O2.O.ff)
+ check("O2.O.fl", O2.O.fl)
+ check("O2.O.fo", O2.O.fo)
+ check("O2.O.fn", O2.O.fn)
+ check("O2.O.g1", O2.O.g1)
+ check("O2.O.gi", O2.O.gi)
+ check("O2.O.gv", O2.O.gv())
+ check("O2.O.gf", O2.O.gf)
+// check("O2.O.gl", O2.O.gl)
+} \ No newline at end of file
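The whole synchronized test reduces to one idiom: java.lang.Thread.holdsLock reports whether the current thread owns a given monitor, so each method can assert exactly which locks its synchronized blocks actually took — the point being that -optimize inlining (see the .flags file above) must not change that set. A minimal sketch of the idiom (LockDemo is an invented name):

    import java.lang.Thread.holdsLock

    class LockDemo {
      // Inside synchronized { ... } the monitor of `this` must be held;
      // the Class object must not be.
      def ok: Boolean = synchronized { holdsLock(this) && !holdsLock(getClass) }
    }

    object LockDemo extends App {
      println((new LockDemo).ok) // true
    }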
diff --git a/test/files/run/t1195.check b/test/files/run/t1195.check
new file mode 100644
index 0000000000..dc521fb8ca
--- /dev/null
+++ b/test/files/run/t1195.check
@@ -0,0 +1,6 @@
+_ <: scala.runtime.AbstractFunction1[Int, _ <: Object with scala.ScalaObject with scala.Product with scala.Serializable] with scala.ScalaObject with scala.Serializable with java.lang.Object
+_ <: Object with scala.ScalaObject with scala.Product with scala.Serializable
+Object with scala.ScalaObject with scala.Product with scala.Serializable
+_ <: scala.runtime.AbstractFunction1[Int, _ <: Object with scala.ScalaObject with scala.Product with scala.Serializable] with scala.ScalaObject with scala.Serializable with java.lang.Object
+_ <: Object with scala.ScalaObject with scala.Product with scala.Serializable
+Object with scala.ScalaObject with scala.Product with scala.Serializable
diff --git a/test/files/run/t1195.scala b/test/files/run/t1195.scala
new file mode 100644
index 0000000000..81ef5bdb0e
--- /dev/null
+++ b/test/files/run/t1195.scala
@@ -0,0 +1,26 @@
+object Test {
+ def f() = { case class Bar(x: Int); Bar }
+ def g() = { case class Bar(x: Int); Bar(5) }
+ def h() = { case object Bar ; Bar }
+
+ val f1 = f()
+ val g1 = g()
+ val h1 = h()
+
+ def m[T: Manifest](x: T) = println(manifest[T])
+
+ def main(args: Array[String]): Unit = {
+ m(f)
+ m(g)
+ m(h)
+ m(f1)
+ m(g1)
+ m(h1)
+ }
+}
+
+class A1[T] {
+ class B1[U] {
+ def f = { case class D(x: Int) extends A1[String] ; new D(5) }
+ }
+}
diff --git a/test/files/run/t2296a.check b/test/files/run/t2296a.check
new file mode 100644
index 0000000000..f75aec9d81
--- /dev/null
+++ b/test/files/run/t2296a.check
@@ -0,0 +1,2 @@
+J.foo()
+J.foo()
diff --git a/test/files/run/t2296a/J.java b/test/files/run/t2296a/J.java
new file mode 100644
index 0000000000..78ff3e9804
--- /dev/null
+++ b/test/files/run/t2296a/J.java
@@ -0,0 +1,7 @@
+package j;
+
+public class J {
+ protected void foo() {
+ System.out.println("J.foo()");
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t2296a/S.scala b/test/files/run/t2296a/S.scala
new file mode 100644
index 0000000000..532d038a42
--- /dev/null
+++ b/test/files/run/t2296a/S.scala
@@ -0,0 +1,18 @@
+package s {
+ import j.J
+
+ trait S extends J {
+ def bar() {
+ foo()
+ }
+ }
+
+ class SC extends J with S
+}
+
+object Test {
+ def main(args : Array[String]) {
+ (new s.SC).bar()
+ (new s.S { }).bar()
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t2296b.check b/test/files/run/t2296b.check
new file mode 100644
index 0000000000..f75aec9d81
--- /dev/null
+++ b/test/files/run/t2296b.check
@@ -0,0 +1,2 @@
+J.foo()
+J.foo()
diff --git a/test/files/run/t2296b/J_1.java b/test/files/run/t2296b/J_1.java
new file mode 100644
index 0000000000..4c91d47073
--- /dev/null
+++ b/test/files/run/t2296b/J_1.java
@@ -0,0 +1,7 @@
+package j;
+
+public class J_1 {
+ protected void foo() {
+ System.out.println("J.foo()");
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t2296b/S_2.scala b/test/files/run/t2296b/S_2.scala
new file mode 100644
index 0000000000..6cdb0cfaba
--- /dev/null
+++ b/test/files/run/t2296b/S_2.scala
@@ -0,0 +1,18 @@
+package s {
+ import j.J_1
+
+ trait S extends J_1 {
+ def bar() {
+ foo()
+ }
+ }
+
+ class SC extends J_1 with S
+}
+
+object Test {
+ def main(args : Array[String]) {
+ (new s.SC).bar()
+ (new s.S { }).bar()
+ }
+}
diff --git a/test/files/run/t3569.check b/test/files/run/t3569.check
new file mode 100644
index 0000000000..24cee4bf00
--- /dev/null
+++ b/test/files/run/t3569.check
@@ -0,0 +1,17 @@
+1
+private final int Test$X.val1
+private final int Test$X.val2
+private final int Test$X.val3
+private int Test$X.lval1
+private int Test$X.lval2
+private int Test$X.lval3
+private int Test$X.var1
+private int Test$X.var2
+private int Test$X.var3
+private volatile int Test$X.bitmap$priv$0
+public int Test$X.x
+public volatile int Test$X.bitmap$0
+public final int Test$Y.z1
+public final int Test$Y.z2
+public int Test$Y.x
+public int Test$Y.y
diff --git a/test/files/run/t3569.flags b/test/files/run/t3569.flags
new file mode 100644
index 0000000000..6933d924d3
--- /dev/null
+++ b/test/files/run/t3569.flags
@@ -0,0 +1 @@
+-Yinline \ No newline at end of file
diff --git a/test/files/run/t3569.scala b/test/files/run/t3569.scala
new file mode 100644
index 0000000000..4699aeaab3
--- /dev/null
+++ b/test/files/run/t3569.scala
@@ -0,0 +1,32 @@
+object Test {
+ final val bippy1 = 1
+ final lazy val bippy2 = 2
+
+ lazy val lv = scala.util.Random.nextInt()
+
+ class X(final var x: Int) {
+ final var var1: Int = 0
+ final private var var2: Int = 0
+ final private[this] var var3: Int = 0
+
+ final val val1: Int = 1
+ final private val val2: Int = 1
+ final private[this] val val3: Int = 1
+
+ final lazy val lval1: Int = 2
+ final private lazy val lval2: Int = 2
+ final private[this] lazy val lval3: Int = 2
+ }
+ case class Y(final var x: Int, final private var y: Int, final val z1: Int, final private val z2: Int) { }
+
+ def f = new X(0).x += 1
+ def main(args: Array[String]) {
+ f
+ val s = new X(0)
+ s.x += 1
+ println(s.x)
+
+ (classOf[X].getDeclaredFields map ("" + _)).sorted foreach println
+ (classOf[Y].getDeclaredFields map ("" + _)).sorted foreach println
+ }
+}
diff --git a/test/files/run/t3575.check b/test/files/run/t3575.check
new file mode 100644
index 0000000000..8b935ad4a3
--- /dev/null
+++ b/test/files/run/t3575.check
@@ -0,0 +1,20 @@
+Two
+Two$mcIL$sp
+Two$mcLI$sp
+Two$mcII$sp
+TwoLong
+TwoLong$mcIL$sp
+TwoLong$mcLI$sp
+TwoLong$mcII$sp
+TwoCool
+TwoCool$mcIL$sp
+TwoCool$mcLI$sp
+TwoCool$mcII$sp
+TwoShort
+TwoShort$mcIL$sp
+TwoShort$mcLI$sp
+TwoShort$mcII$sp
+TwoMinimal
+TwoMinimal$mcIL$sp
+TwoMinimal$mcLI$sp
+TwoMinimal$mcII$sp
diff --git a/test/files/run/t3575.scala b/test/files/run/t3575.scala
new file mode 100644
index 0000000000..9ccd90a8c4
--- /dev/null
+++ b/test/files/run/t3575.scala
@@ -0,0 +1,55 @@
+// This is here to tell me if the behavior changes, not because
+// the output is endorsed.
+case class Two[
+ @specialized A,
+ @specialized B
+](v: A, w: B)
+
+case class TwoLong[
+ @specialized(Char, Boolean, Byte, Short, Int, Long, Float, Double, Unit, AnyRef) A,
+ @specialized(Char, Boolean, Byte, Short, Int, Long, Float, Double, Unit, AnyRef) B
+](v: A, w: B)
+
+case class TwoCool[
+ @specialized(Specializable.Everything) A,
+ @specialized(Specializable.Everything) B
+](v: A, w: B)
+
+case class TwoShort[
+ @specialized() A,
+ @specialized() B
+](v: A, w: B)
+
+case class TwoMinimal[
+ @specialized(Int, AnyRef) A,
+ @specialized(Int, AnyRef) B
+](v: A, w: B)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(Two("Hello", "World").getClass().getName());
+ println(Two(12, "Hello").getClass().getName());
+ println(Two("Hello", 12).getClass().getName());
+ println(Two(12, 12).getClass().getName());
+
+ println(TwoLong("Hello", "World").getClass().getName());
+ println(TwoLong(12, "Hello").getClass().getName());
+ println(TwoLong("Hello", 12).getClass().getName());
+ println(TwoLong(12, 12).getClass().getName());
+
+ println(TwoCool("Hello", "World").getClass().getName());
+ println(TwoCool(12, "Hello").getClass().getName());
+ println(TwoCool("Hello", 12).getClass().getName());
+ println(TwoCool(12, 12).getClass().getName());
+
+ println(TwoShort("Hello", "World").getClass().getName());
+ println(TwoShort(12, "Hello").getClass().getName());
+ println(TwoShort("Hello", 12).getClass().getName());
+ println(TwoShort(12, 12).getClass().getName());
+
+ println(TwoMinimal("Hello", "World").getClass().getName());
+ println(TwoMinimal(12, "Hello").getClass().getName());
+ println(TwoMinimal("Hello", 12).getClass().getName());
+ println(TwoMinimal(12, 12).getClass().getName());
+ }
+}
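t3575 records (without endorsing, per its header comment) which generated subclass an instantiation of a @specialized case class selects: the $mc..$sp suffix encodes the specialized type parameters, I for Int and L for AnyRef, while a fully-generic instantiation falls back to the plain class. A hedged sketch mirroring TwoMinimal (Pair/PairDemo are invented names; the exact suffixes for mixed instantiations are whatever the check file records for a given compiler):

    case class Pair[@specialized(Int, AnyRef) A, @specialized(Int, AnyRef) B](v: A, w: B)

    object PairDemo extends App {
      println(Pair("a", "b").getClass.getName) // generic class: Pair
      println(Pair(1, 2).getClass.getName)     // fully specialized: Pair$mcII$sp
    }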
diff --git a/test/files/run/si4147.scala b/test/files/run/t4147.scala
index c1e2d746a9..c1e2d746a9 100644
--- a/test/files/run/si4147.scala
+++ b/test/files/run/t4147.scala
diff --git a/test/files/run/t4171.check b/test/files/run/t4171.check
new file mode 100644
index 0000000000..d72391a1c4
--- /dev/null
+++ b/test/files/run/t4171.check
@@ -0,0 +1,3 @@
+1
+5
+class Test$B$1
diff --git a/test/files/run/t4171.scala b/test/files/run/t4171.scala
new file mode 100644
index 0000000000..fba2fb5ed6
--- /dev/null
+++ b/test/files/run/t4171.scala
@@ -0,0 +1,11 @@
+object Test {
+ val c = { class C; new C { def foo = 1 } }
+ val a = { class B { def bar = 5 }; class C extends B; new C }
+ val e = { class A; class B extends A; classOf[B] }
+
+ def main(args: Array[String]): Unit = {
+ println(c.foo)
+ println(a.bar)
+ println(e)
+ }
+}
diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check
index cd7a2905e2..a0600ba859 100644
--- a/test/files/run/t4542.check
+++ b/test/files/run/t4542.check
@@ -15,6 +15,9 @@ scala> val f = new Foo
<console>:8: warning: class Foo is deprecated: foooo
val f = new Foo
^
+<console>:5: warning: class Foo is deprecated: foooo
+ lazy val $result = `f`
+ ^
f: Foo = Bippy
scala>
diff --git a/test/files/run/t4770.check b/test/files/run/t4770.check
new file mode 100644
index 0000000000..38e5a831fa
--- /dev/null
+++ b/test/files/run/t4770.check
@@ -0,0 +1,2 @@
+(a,2)
+(2,a)
diff --git a/test/files/run/t4770.scala b/test/files/run/t4770.scala
new file mode 100644
index 0000000000..25bf3050c3
--- /dev/null
+++ b/test/files/run/t4770.scala
@@ -0,0 +1,15 @@
+package crasher {
+ class Z[@specialized A, @specialized(AnyRef) B](var a: A, var b: B) {
+ override def toString = "" + ((a, b))
+ }
+ object O {
+ def apply[@specialized A, @specialized(AnyRef) B](a0: A, b0: B) = new Z(a0, b0)
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(crasher.O("a", 2))
+ println(crasher.O(2, "a"))
+ }
+}
diff --git a/test/files/run/t4777.check b/test/files/run/t4777.check
new file mode 100644
index 0000000000..11f1f59d43
--- /dev/null
+++ b/test/files/run/t4777.check
@@ -0,0 +1,2 @@
+28
+28
diff --git a/test/files/run/t4777.scala b/test/files/run/t4777.scala
new file mode 100644
index 0000000000..4a811d3b9a
--- /dev/null
+++ b/test/files/run/t4777.scala
@@ -0,0 +1,8 @@
+class A(val a: Int = 13)
+class DefaultsTest(x: Int = 25) extends A(28)
+object DefaultsTest extends DefaultsTest(12)
+
+object Test extends App {
+ println(new DefaultsTest() a)
+ println(DefaultsTest a)
+}
diff --git a/test/files/run/t4875.check b/test/files/run/t4875.check
deleted file mode 100644
index f7609d5ca5..0000000000
--- a/test/files/run/t4875.check
+++ /dev/null
@@ -1,17 +0,0 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> import scala.reflect.Code
-import scala.reflect.Code
-
-scala> def codeOf[A](code: Code[A]) = code
-codeOf: [A](code: scala.reflect.Code[A])scala.reflect.Code[A]
-
-scala> codeOf((x: Iterable[_]) => throw new Exception)
-res0: scala.reflect.Code[Iterable[_] => Nothing] = Code(tree = ((x: Iterable[Any]) => throw new scala.`package`.Exception()), manifest = scala.Function1[scala.collection.Iterable[Any], Nothing])
-
-scala>
-
-scala>
diff --git a/test/files/run/t4875.scala b/test/files/run/t4875.scala
deleted file mode 100644
index c17211aede..0000000000
--- a/test/files/run/t4875.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-import scala.tools.nsc.interpreter._
-import scala.tools.partest.ReplTest
-
-object Test extends ReplTest {
- class M[@specialized T] { }
-
- def code = """
- |import scala.reflect.Code
- |def codeOf[A](code: Code[A]) = code
- |codeOf((x: Iterable[_]) => throw new Exception)
- """.stripMargin
-}
diff --git a/test/files/run/t5171.check b/test/files/run/t5171.check
new file mode 100644
index 0000000000..159606d35c
--- /dev/null
+++ b/test/files/run/t5171.check
@@ -0,0 +1 @@
+IsList
diff --git a/test/files/run/t5171.scala b/test/files/run/t5171.scala
new file mode 100644
index 0000000000..eb8029df80
--- /dev/null
+++ b/test/files/run/t5171.scala
@@ -0,0 +1,7 @@
+abstract sealed class ArgNumber
+case object IsList extends ArgNumber
+case object ArgNumber
+
+object Test extends App {
+ println(IsList)
+}
diff --git a/test/pending/run/t5224.check b/test/files/run/t5224.check
index 2b920773c0..5bead91b36 100644
--- a/test/pending/run/t5224.check
+++ b/test/files/run/t5224.check
@@ -1,9 +1,9 @@
{
- @serializable class C extends Object with ScalaObject {
+ @new Foo(bar = "qwe") class C extends Object with ScalaObject {
def <init>() = {
super.<init>();
()
}
};
()
-} \ No newline at end of file
+}
diff --git a/test/files/run/t5224.scala b/test/files/run/t5224.scala
new file mode 100644
index 0000000000..2226a69a05
--- /dev/null
+++ b/test/files/run/t5224.scala
@@ -0,0 +1,9 @@
+import scala.reflect._
+import scala.reflect.api._
+
+class Foo(bar: String) extends ClassfileAnnotation
+
+object Test extends App {
+ val tree = scala.reflect.Code.lift{@Foo(bar = "qwe") class C}.tree
+ println(tree.toString)
+} \ No newline at end of file
diff --git a/test/files/run/t5225_1.check b/test/files/run/t5225_1.check
new file mode 100644
index 0000000000..719da572c7
--- /dev/null
+++ b/test/files/run/t5225_1.check
@@ -0,0 +1,4 @@
+{
+ @new transient() @new volatile() var x: Int = 2;
+ ()
+}
diff --git a/test/files/run/t5225_1.scala b/test/files/run/t5225_1.scala
new file mode 100644
index 0000000000..a655b7dd71
--- /dev/null
+++ b/test/files/run/t5225_1.scala
@@ -0,0 +1,7 @@
+import scala.reflect._
+import scala.reflect.api._
+
+object Test extends App {
+ val tree = scala.reflect.Code.lift{@transient @volatile var x = 2}.tree
+ println(tree.toString)
+} \ No newline at end of file
diff --git a/test/files/run/t5225_2.check b/test/files/run/t5225_2.check
new file mode 100644
index 0000000000..c4f6b4761e
--- /dev/null
+++ b/test/files/run/t5225_2.check
@@ -0,0 +1,4 @@
+{
+ def foo(@new cloneable() x: Int): String = "";
+ ()
+}
diff --git a/test/files/run/t5225_2.scala b/test/files/run/t5225_2.scala
new file mode 100644
index 0000000000..65ea9b2f73
--- /dev/null
+++ b/test/files/run/t5225_2.scala
@@ -0,0 +1,7 @@
+import scala.reflect._
+import scala.reflect.api._
+
+object Test extends App {
+ val tree = scala.reflect.Code.lift{def foo(@cloneable x: Int) = ""}.tree
+ println(tree.toString)
+} \ No newline at end of file
diff --git a/test/pending/run/t5229_1.check b/test/files/run/t5229_1.check
index e69de29bb2..e69de29bb2 100644
--- a/test/pending/run/t5229_1.check
+++ b/test/files/run/t5229_1.check
diff --git a/test/pending/run/t5229_1.scala b/test/files/run/t5229_1.scala
index 1d7bf0590b..d5af569656 100644
--- a/test/pending/run/t5229_1.scala
+++ b/test/files/run/t5229_1.scala
@@ -9,6 +9,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5229_2.check b/test/files/run/t5229_2.check
new file mode 100644
index 0000000000..43c25b96af
--- /dev/null
+++ b/test/files/run/t5229_2.check
@@ -0,0 +1,2 @@
+2
+evaluated = ()
diff --git a/test/pending/run/t5229_2.scala b/test/files/run/t5229_2.scala
index 67be7328a6..07f9ac6b84 100644
--- a/test/pending/run/t5229_2.scala
+++ b/test/files/run/t5229_2.scala
@@ -13,7 +13,6 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- val evaluated = toolbox.runExpr(ttree)
+ val evaluated = toolbox.runExpr(code.tree)
println("evaluated = " + evaluated)
}
diff --git a/test/files/run/t5230.check b/test/files/run/t5230.check
index 5db6ec9b38..43c25b96af 100644
--- a/test/files/run/t5230.check
+++ b/test/files/run/t5230.check
@@ -1,2 +1,2 @@
2
-evaluated = null
+evaluated = ()
diff --git a/test/files/run/t5230.scala b/test/files/run/t5230.scala
index 5aab8f9290..d3106ca05c 100644
--- a/test/files/run/t5230.scala
+++ b/test/files/run/t5230.scala
@@ -13,7 +13,6 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- val evaluated = toolbox.runExpr(ttree)
+ val evaluated = toolbox.runExpr(code.tree)
println("evaluated = " + evaluated)
}
diff --git a/test/files/run/t5258a.scala b/test/files/run/t5258a.scala
index deabb8310f..8cc4249e06 100644
--- a/test/files/run/t5258a.scala
+++ b/test/files/run/t5258a.scala
@@ -9,6 +9,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
} \ No newline at end of file
diff --git a/test/files/run/si5262.check b/test/files/run/t5262.check
index 4c7a875de5..4c7a875de5 100644
--- a/test/files/run/si5262.check
+++ b/test/files/run/t5262.check
diff --git a/test/files/run/si5262.scala b/test/files/run/t5262.scala
index fc4e57aa96..fc4e57aa96 100644
--- a/test/files/run/si5262.scala
+++ b/test/files/run/t5262.scala
diff --git a/test/files/run/t5266_1.check b/test/files/run/t5266_1.check
index 3feac16a0b..35f20802ee 100644
--- a/test/files/run/t5266_1.check
+++ b/test/files/run/t5266_1.check
@@ -1,2 +1,2 @@
2
-evaluated = null \ No newline at end of file
+evaluated = () \ No newline at end of file
diff --git a/test/files/run/t5266_1.scala b/test/files/run/t5266_1.scala
index 18e288e685..4262bc7a7b 100644
--- a/test/files/run/t5266_1.scala
+++ b/test/files/run/t5266_1.scala
@@ -10,7 +10,6 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- val evaluated = toolbox.runExpr(ttree)
+ val evaluated = toolbox.runExpr(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/t5266_2.check b/test/files/run/t5266_2.check
index 3feac16a0b..35f20802ee 100644
--- a/test/files/run/t5266_2.check
+++ b/test/files/run/t5266_2.check
@@ -1,2 +1,2 @@
2
-evaluated = null \ No newline at end of file
+evaluated = () \ No newline at end of file
diff --git a/test/files/run/t5266_2.scala b/test/files/run/t5266_2.scala
index eb319583f8..d0f718dbd7 100644
--- a/test/files/run/t5266_2.scala
+++ b/test/files/run/t5266_2.scala
@@ -11,7 +11,6 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- val evaluated = toolbox.runExpr(ttree)
+ val evaluated = toolbox.runExpr(code.tree)
println("evaluated = " + evaluated)
}
diff --git a/test/files/run/t5269.scala b/test/files/run/t5269.scala
index a30509f3fe..cab99f17e6 100644
--- a/test/files/run/t5269.scala
+++ b/test/files/run/t5269.scala
@@ -17,6 +17,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/t5270.check b/test/files/run/t5270.check
index 08839f6bb2..08839f6bb2 100644
--- a/test/pending/run/t5270.check
+++ b/test/files/run/t5270.check
diff --git a/test/pending/run/t5270.scala b/test/files/run/t5270.scala
index 10f79790b0..934cc13dea 100644
--- a/test/pending/run/t5270.scala
+++ b/test/files/run/t5270.scala
@@ -21,6 +21,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5271_1.check b/test/files/run/t5271_1.check
new file mode 100644
index 0000000000..d4fd544e88
--- /dev/null
+++ b/test/files/run/t5271_1.check
@@ -0,0 +1,11 @@
+{
+ case class C extends Object with ScalaObject with Product with Serializable {
+ <caseaccessor> <paramaccessor> val foo : Int = _;
+ <caseaccessor> <paramaccessor> val bar : Int = _;
+ def <init>(foo: Int, bar: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
diff --git a/test/files/run/t5271_1.scala b/test/files/run/t5271_1.scala
new file mode 100644
index 0000000000..fbc57aead7
--- /dev/null
+++ b/test/files/run/t5271_1.scala
@@ -0,0 +1,13 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ case class C(foo: Int, bar: Int)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ println(code.tree)
+}
diff --git a/test/files/run/t5271_2.check b/test/files/run/t5271_2.check
new file mode 100644
index 0000000000..5a519f265f
--- /dev/null
+++ b/test/files/run/t5271_2.check
@@ -0,0 +1,12 @@
+{
+ case class C extends Object with ScalaObject with Product with Serializable {
+ <caseaccessor> <paramaccessor> val foo : Int = _;
+ <caseaccessor> <paramaccessor> val bar : Int = _;
+ def <init>(foo: Int, bar: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ val c = C.apply(2, 2);
+ scala.this.Predef.println(c.foo.$times(c.bar))
+}
diff --git a/test/files/run/t5271_2.scala b/test/files/run/t5271_2.scala
new file mode 100644
index 0000000000..4bfc574e00
--- /dev/null
+++ b/test/files/run/t5271_2.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ case class C(foo: Int, bar: Int)
+ val c = C(2, 2)
+ println(c.foo * c.bar)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ println(code.tree)
+}
diff --git a/test/files/run/t5271_3.check b/test/files/run/t5271_3.check
new file mode 100644
index 0000000000..be87696f02
--- /dev/null
+++ b/test/files/run/t5271_3.check
@@ -0,0 +1,19 @@
+{
+ object C extends Object with ScalaObject with Serializable {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def qwe: Int = 4
+ };
+ case class C extends Object with ScalaObject with Product with Serializable {
+ <caseaccessor> <paramaccessor> val foo : Int = _;
+ <caseaccessor> <paramaccessor> val bar : Int = _;
+ def <init>(foo: Int, bar: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ val c = C.apply(2, 2);
+ scala.this.Predef.println(c.foo.$times(c.bar).$eq$eq(C.qwe))
+}
diff --git a/test/files/run/t5271_3.scala b/test/files/run/t5271_3.scala
new file mode 100644
index 0000000000..a085bdca4c
--- /dev/null
+++ b/test/files/run/t5271_3.scala
@@ -0,0 +1,16 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ object C { def qwe = 4 }
+ case class C(foo: Int, bar: Int)
+ val c = C(2, 2)
+ println(c.foo * c.bar == C.qwe)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ println(code.tree)
+}
diff --git a/test/files/run/t5271_4.check b/test/files/run/t5271_4.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t5271_4.check
diff --git a/test/files/run/t5271_4.scala b/test/files/run/t5271_4.scala
new file mode 100644
index 0000000000..c253b1adca
--- /dev/null
+++ b/test/files/run/t5271_4.scala
@@ -0,0 +1,13 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ case object C
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/files/run/t5272_1.check b/test/files/run/t5272_1.check
new file mode 100644
index 0000000000..9f8d6f24e7
--- /dev/null
+++ b/test/files/run/t5272_1.check
@@ -0,0 +1 @@
+okay \ No newline at end of file
diff --git a/test/pending/run/t5272.scala b/test/files/run/t5272_1.scala
index 3f44d05fb3..882287f033 100644
--- a/test/pending/run/t5272.scala
+++ b/test/files/run/t5272_1.scala
@@ -12,6 +12,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5272_2.check b/test/files/run/t5272_2.check
new file mode 100644
index 0000000000..549f3f3af8
--- /dev/null
+++ b/test/files/run/t5272_2.check
@@ -0,0 +1 @@
+okay2 \ No newline at end of file
diff --git a/test/files/run/t5272_2.scala b/test/files/run/t5272_2.scala
new file mode 100644
index 0000000000..48b6a670bb
--- /dev/null
+++ b/test/files/run/t5272_2.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ 2 match {
+ case x => println("okay" + x)
+ }
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/pending/run/t5273_2.check b/test/files/run/t5273_1.check
index 0cfbf08886..0cfbf08886 100644
--- a/test/pending/run/t5273_2.check
+++ b/test/files/run/t5273_1.check
diff --git a/test/pending/run/t5273_2.scala b/test/files/run/t5273_1.scala
index 1175881c9f..80460a4ae6 100644
--- a/test/pending/run/t5273_2.scala
+++ b/test/files/run/t5273_1.scala
@@ -12,6 +12,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5273_2a.check b/test/files/run/t5273_2a.check
new file mode 100644
index 0000000000..d8263ee986
--- /dev/null
+++ b/test/files/run/t5273_2a.check
@@ -0,0 +1 @@
+2 \ No newline at end of file
diff --git a/test/files/run/t5273_2a.scala b/test/files/run/t5273_2a.scala
new file mode 100644
index 0000000000..a7a336d8a7
--- /dev/null
+++ b/test/files/run/t5273_2a.scala
@@ -0,0 +1,14 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ val foo :: bar :: _ = List(1, 2, 3)
+ println(foo * bar)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/pending/run/t5273_1.check b/test/files/run/t5273_2b.check
index c551774ca5..c551774ca5 100644
--- a/test/pending/run/t5273_1.check
+++ b/test/files/run/t5273_2b.check
diff --git a/test/pending/run/t5273_1.scala b/test/files/run/t5273_2b.scala
index 8b75084463..85c40f0607 100644
--- a/test/pending/run/t5273_1.scala
+++ b/test/files/run/t5273_2b.scala
@@ -11,6 +11,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5274_1.scala b/test/files/run/t5274_1.scala
index c501172518..74a5b81bcb 100644
--- a/test/files/run/t5274_1.scala
+++ b/test/files/run/t5274_1.scala
@@ -15,6 +15,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5274_2.scala b/test/files/run/t5274_2.scala
index 42991fe5d2..5984a64967 100644
--- a/test/files/run/t5274_2.scala
+++ b/test/files/run/t5274_2.scala
@@ -52,6 +52,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5275.scala b/test/files/run/t5275.scala
index d419834ded..285d8a18a4 100644
--- a/test/files/run/t5275.scala
+++ b/test/files/run/t5275.scala
@@ -10,6 +10,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5276_1a.check b/test/files/run/t5276_1a.check
new file mode 100644
index 0000000000..d8263ee986
--- /dev/null
+++ b/test/files/run/t5276_1a.check
@@ -0,0 +1 @@
+2 \ No newline at end of file
diff --git a/test/pending/run/t5276.scala b/test/files/run/t5276_1a.scala
index 432fdb91e4..b717675824 100644
--- a/test/pending/run/t5276.scala
+++ b/test/files/run/t5276_1a.scala
@@ -4,12 +4,11 @@ import reflect.runtime.Mirror.ToolBox
object Test extends App {
val code = scala.reflect.Code.lift{
- lazy x = 2
+ lazy val x = 2
println(x)
};
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5276_1b.check b/test/files/run/t5276_1b.check
new file mode 100644
index 0000000000..d8263ee986
--- /dev/null
+++ b/test/files/run/t5276_1b.check
@@ -0,0 +1 @@
+2 \ No newline at end of file
diff --git a/test/files/run/t5276_1b.scala b/test/files/run/t5276_1b.scala
new file mode 100644
index 0000000000..1ff25504ca
--- /dev/null
+++ b/test/files/run/t5276_1b.scala
@@ -0,0 +1,14 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ implicit lazy val x = 2
+ println(implicitly[Int])
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/files/run/t5276_2a.check b/test/files/run/t5276_2a.check
new file mode 100644
index 0000000000..d8263ee986
--- /dev/null
+++ b/test/files/run/t5276_2a.check
@@ -0,0 +1 @@
+2 \ No newline at end of file
diff --git a/test/files/run/t5276_2a.scala b/test/files/run/t5276_2a.scala
new file mode 100644
index 0000000000..af5ff2a565
--- /dev/null
+++ b/test/files/run/t5276_2a.scala
@@ -0,0 +1,17 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ class C {
+ lazy val x = 2
+ }
+
+ println(new C().x)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/files/run/t5276_2b.check b/test/files/run/t5276_2b.check
new file mode 100644
index 0000000000..d8263ee986
--- /dev/null
+++ b/test/files/run/t5276_2b.check
@@ -0,0 +1 @@
+2 \ No newline at end of file
diff --git a/test/files/run/t5276_2b.scala b/test/files/run/t5276_2b.scala
new file mode 100644
index 0000000000..63904b2898
--- /dev/null
+++ b/test/files/run/t5276_2b.scala
@@ -0,0 +1,18 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ class C {
+ implicit lazy val x = 2
+ def y = implicitly[Int]
+ }
+
+ println(new C().y)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/files/run/t5277_1.scala b/test/files/run/t5277_1.scala
index 57acd699ff..0aaec7cdf2 100644
--- a/test/files/run/t5277_1.scala
+++ b/test/files/run/t5277_1.scala
@@ -16,6 +16,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5277_2.scala b/test/files/run/t5277_2.scala
index 67b6b000bc..91ed55122a 100644
--- a/test/files/run/t5277_2.scala
+++ b/test/files/run/t5277_2.scala
@@ -13,6 +13,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5279.scala b/test/files/run/t5279.scala
index 39e7dd2c66..cef58535d5 100644
--- a/test/files/run/t5279.scala
+++ b/test/files/run/t5279.scala
@@ -9,6 +9,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/t5293-map.scala b/test/files/run/t5293-map.scala
new file mode 100644
index 0000000000..2707aed07e
--- /dev/null
+++ b/test/files/run/t5293-map.scala
@@ -0,0 +1,88 @@
+
+
+
+import scala.collection.JavaConverters._
+
+
+
+object Test extends App {
+
+ def bench(label: String)(body: => Unit): Long = {
+ val start = System.nanoTime
+
+ 0.until(10).foreach(_ => body)
+
+ val end = System.nanoTime
+
+ //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
+
+ end - start
+ }
+
+ def benchJava(values: java.util.Map[Int, Int]) = {
+ bench("Java Map") {
+ val m = new java.util.HashMap[Int, Int]
+
+ m.putAll(values)
+ }
+ }
+
+ def benchScala(values: Iterable[(Int, Int)]) = {
+ bench("Scala Map") {
+ val m = new scala.collection.mutable.HashMap[Int, Int]
+
+ m ++= values
+ }
+ }
+
+ def benchScalaSorted(values: Iterable[(Int, Int)]) = {
+ bench("Scala Map sorted") {
+ val m = new scala.collection.mutable.HashMap[Int, Int]
+
+ m ++= values.toArray.sorted
+ }
+ }
+
+ def benchScalaPar(values: Iterable[(Int, Int)]) = {
+ bench("Scala ParMap") {
+ val m = new scala.collection.parallel.mutable.ParHashMap[Int, Int] map { x => x }
+
+ m ++= values
+ }
+ }
+
+ val total = 50000
+ val values = (0 until total) zip (0 until total)
+ val map = scala.collection.mutable.HashMap.empty[Int, Int]
+
+ map ++= values
+
+ // warmup
+ for (x <- 0 until 5) {
+ benchJava(map.asJava)
+ benchScala(map)
+ benchScalaPar(map)
+ benchJava(map.asJava)
+ benchScala(map)
+ benchScalaPar(map)
+ }
+
+ val javamap = benchJava(map.asJava)
+ val scalamap = benchScala(map)
+ val scalaparmap = benchScalaPar(map)
+
+ // println(javamap)
+ // println(scalamap)
+ // println(scalaparmap)
+
+ assert(scalamap < (javamap * 10), "scalamap: " + scalamap + " vs. javamap: " + javamap)
+ assert(scalaparmap < (javamap * 10), "scalaparmap: " + scalaparmap + " vs. javamap: " + javamap)
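+
+ // Descriptive note (not part of the original patch): the factor-of-10
+ // bound is deliberately loose. This is a smoke test that mutable.HashMap
+ // bulk insertion stays within an order of magnitude of java.util.HashMap,
+ // not a precise benchmark, so it tolerates noisy test machines.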
+}
+
+
+
+
+
+
+
+
diff --git a/test/files/run/t5293.scala b/test/files/run/t5293.scala
index de1efaec4a..01ead45d2a 100644
--- a/test/files/run/t5293.scala
+++ b/test/files/run/t5293.scala
@@ -70,8 +70,8 @@ object Test extends App {
val scalaset = benchScala(set)
val scalaparset = benchScalaPar(set)
- assert(scalaset < (javaset * 4))
- assert(scalaparset < (javaset * 4))
+ assert(scalaset < (javaset * 8), "scalaset: " + scalaset + " vs. javaset: " + javaset)
+ assert(scalaparset < (javaset * 8), "scalaparset: " + scalaparset + " vs. javaset: " + javaset)
}
diff --git a/test/files/run/t5334_1.check b/test/files/run/t5334_1.check
new file mode 100644
index 0000000000..96d80cd6c4
--- /dev/null
+++ b/test/files/run/t5334_1.check
@@ -0,0 +1 @@
+C \ No newline at end of file
diff --git a/test/pending/run/t5334_1.scala b/test/files/run/t5334_1.scala
index c1eba89c2b..9887bebf78 100644
--- a/test/pending/run/t5334_1.scala
+++ b/test/files/run/t5334_1.scala
@@ -4,12 +4,11 @@ import reflect.runtime.Mirror.ToolBox
object Test extends App {
val code = scala.reflect.Code.lift{
- class C
+ class C { override def toString = "C" }
new C
};
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ println(toolbox.runExpr(code.tree))
}
diff --git a/test/files/run/t5334_2.check b/test/files/run/t5334_2.check
new file mode 100644
index 0000000000..613d286a18
--- /dev/null
+++ b/test/files/run/t5334_2.check
@@ -0,0 +1 @@
+List((C,C)) \ No newline at end of file
diff --git a/test/pending/run/t5334_2.scala b/test/files/run/t5334_2.scala
index 361b8c85f2..775a05aaf7 100644
--- a/test/pending/run/t5334_2.scala
+++ b/test/files/run/t5334_2.scala
@@ -4,12 +4,11 @@ import reflect.runtime.Mirror.ToolBox
object Test extends App {
val code = scala.reflect.Code.lift{
- class C
+ class C { override def toString() = "C" }
List((new C, new C))
};
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ println(toolbox.runExpr(code.tree))
}
diff --git a/test/files/run/t5335.scala b/test/files/run/t5335.scala
index 9a8b91f04d..8e2ed59db6 100644
--- a/test/files/run/t5335.scala
+++ b/test/files/run/t5335.scala
@@ -9,6 +9,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/files/run/si5374.check b/test/files/run/t5374.check
index 6be88d77ec..6be88d77ec 100644
--- a/test/files/run/si5374.check
+++ b/test/files/run/t5374.check
diff --git a/test/files/run/si5374.scala b/test/files/run/t5374.scala
index 9b1671e795..9b1671e795 100644
--- a/test/files/run/si5374.scala
+++ b/test/files/run/t5374.scala
diff --git a/test/files/run/si5375.check b/test/files/run/t5375.check
index 7d3002ffda..7d3002ffda 100644
--- a/test/files/run/si5375.check
+++ b/test/files/run/t5375.check
diff --git a/test/files/run/si5375.scala b/test/files/run/t5375.scala
index e4b329deae..e4b329deae 100644
--- a/test/files/run/si5375.scala
+++ b/test/files/run/t5375.scala
diff --git a/test/files/run/t5380.scala b/test/files/run/t5380.scala
new file mode 100644
index 0000000000..6083161a9b
--- /dev/null
+++ b/test/files/run/t5380.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]) {
+ val f = () => return try { 1 } catch { case _ => 0 }
+ f()
+ }
+}
diff --git a/test/files/run/t5419.check b/test/files/run/t5419.check
new file mode 100644
index 0000000000..7e6d739354
--- /dev/null
+++ b/test/files/run/t5419.check
@@ -0,0 +1 @@
+(5: Int(5) @Foo)
diff --git a/test/files/run/t5419.scala b/test/files/run/t5419.scala
new file mode 100644
index 0000000000..695786e5c4
--- /dev/null
+++ b/test/files/run/t5419.scala
@@ -0,0 +1,9 @@
+import scala.reflect._
+import scala.reflect.api._
+
+class Foo extends StaticAnnotation
+
+object Test extends App {
+ val tree = scala.reflect.Code.lift{5: @Foo}.tree
+ println(tree.toString)
+} \ No newline at end of file
diff --git a/test/files/run/t5423.scala b/test/files/run/t5423.scala
index 2139773ff1..fc507c417b 100644
--- a/test/files/run/t5423.scala
+++ b/test/files/run/t5423.scala
@@ -6,7 +6,7 @@ import scala.reflect.Code
final class table extends StaticAnnotation
@table class A
-object Test extends App{
+object Test extends App {
val s = classToSymbol(classOf[A])
- println(s.getAnnotations)
+ println(s.annotations)
}
diff --git a/test/files/run/t5488-fn.check b/test/files/run/t5488-fn.check
new file mode 100644
index 0000000000..18907d0ab8
--- /dev/null
+++ b/test/files/run/t5488-fn.check
@@ -0,0 +1,17 @@
+B$mcII$sp
+B$mcIL$sp
+B$mcIV$sp
+B$mcLI$sp
+B
+B$mcLV$sp
+B$mcVI$sp
+B$mcVL$sp
+B$mcVV$sp
+C$mcIII$sp
+C$mcIIL$sp
+C$mcILI$sp
+C$mcILL$sp
+C$mcLII$sp
+C$mcLIL$sp
+C$mcLLI$sp
+C
diff --git a/test/files/run/t5488-fn.scala b/test/files/run/t5488-fn.scala
new file mode 100644
index 0000000000..d17bcf90a5
--- /dev/null
+++ b/test/files/run/t5488-fn.scala
@@ -0,0 +1,27 @@
+class B[@specialized(Int, AnyRef, Unit) A, @specialized(Int, AnyRef, Unit) B](f: A => B)
+class C[@specialized(Int, AnyRef) A, @specialized(Int, AnyRef) B, @specialized(Int, AnyRef) C](f: (A, B) => C)
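+
+// Naming note (descriptive comment, not part of the original patch): in a
+// specialized class name like B$mcIL$sp, the letters after "mc" encode the
+// specialized type arguments, one per type parameter: I = Int, L = AnyRef,
+// V = Unit (D = Double elsewhere); the unspecialized combination keeps the
+// plain name, e.g. B for (AnyRef, AnyRef).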
+
+object Test {
+ def main(args:Array[String]) {
+ def show(x: Any) = println(x.getClass.getName)
+
+ show(new B((x: Int) => 1))
+ show(new B((x: Int) => "abc"))
+ show(new B((x: Int) => ()))
+ show(new B((x: AnyRef) => 1))
+ show(new B((x: AnyRef) => "abc"))
+ show(new B((x: AnyRef) => ()))
+ show(new B((x: Unit) => 1))
+ show(new B((x: Unit) => "abc"))
+ show(new B((x: Unit) => ()))
+
+ show(new C((x: Int, y: Int) => 1))
+ show(new C((x: Int, y: Int) => "abc"))
+ show(new C((x: Int, y: AnyRef) => 1))
+ show(new C((x: Int, y: AnyRef) => "abc"))
+ show(new C((x: AnyRef, y: Int) => 1))
+ show(new C((x: AnyRef, y: Int) => "abc"))
+ show(new C((x: AnyRef, y: AnyRef) => 1))
+ show(new C((x: AnyRef, y: AnyRef) => "abc"))
+ }
+}
diff --git a/test/files/run/t5488.check b/test/files/run/t5488.check
new file mode 100644
index 0000000000..ccd98c4dbc
--- /dev/null
+++ b/test/files/run/t5488.check
@@ -0,0 +1,14 @@
+A0$mcI$sp
+A0
+B0$mcII$sp
+B0$mcIL$sp
+B0$mcLI$sp
+B0
+C0$mcIII$sp
+C0$mcIIL$sp
+C0$mcILI$sp
+C0$mcILL$sp
+C0$mcLII$sp
+C0$mcLIL$sp
+C0$mcLLI$sp
+C0
diff --git a/test/files/run/t5488.scala b/test/files/run/t5488.scala
new file mode 100644
index 0000000000..7bab0cdc3c
--- /dev/null
+++ b/test/files/run/t5488.scala
@@ -0,0 +1,26 @@
+class A0[@specialized(Int, AnyRef) A]()
+class B0[@specialized(Int, AnyRef) A, @specialized(Int, AnyRef) B]()
+class C0[@specialized(Int, AnyRef) A, @specialized(Int, AnyRef) B, @specialized(Int, AnyRef) C]()
+
+object Test {
+ def main(args:Array[String]) {
+ def show(x: Any) = println(x.getClass.getName)
+
+ show(new A0[Int]())
+ show(new A0[AnyRef]())
+
+ show(new B0[Int, Int]())
+ show(new B0[Int, AnyRef]())
+ show(new B0[AnyRef, Int]())
+ show(new B0[AnyRef, AnyRef]())
+
+ show(new C0[Int, Int, Int]())
+ show(new C0[Int, Int, AnyRef]())
+ show(new C0[Int, AnyRef, Int]())
+ show(new C0[Int, AnyRef, AnyRef]())
+ show(new C0[AnyRef, Int, Int]())
+ show(new C0[AnyRef, Int, AnyRef]())
+ show(new C0[AnyRef, AnyRef, Int]())
+ show(new C0[AnyRef, AnyRef, AnyRef]())
+ }
+}
diff --git a/test/files/run/t5500.check b/test/files/run/t5500.check
new file mode 100644
index 0000000000..19c6dda00e
--- /dev/null
+++ b/test/files/run/t5500.check
@@ -0,0 +1,2 @@
+C1$mcLI$sp
+C2$mcLI$sp
diff --git a/test/files/run/t5500.scala b/test/files/run/t5500.scala
new file mode 100644
index 0000000000..6fbe168a54
--- /dev/null
+++ b/test/files/run/t5500.scala
@@ -0,0 +1,12 @@
+import scala.{specialized => spec}
+
+class C1[@spec(Int, AnyRef) A, @spec(Int, AnyRef) B](v:A, w:B)
+
+class C2[@spec(Unit, Boolean, Byte, Char, Short, Int, Long, Float, Double, AnyRef) A, @spec(Unit, Boolean, Byte, Char, Short, Int, Long, Float, Double, AnyRef) B](v:A, w:B)
+
+object Test {
+ def main(args:Array[String]) {
+ println(new C1("abc", 123).getClass.getName)
+ println(new C2[String, Int]("abc", 123).getClass.getName)
+ }
+}
diff --git a/test/files/run/t5500b.check b/test/files/run/t5500b.check
new file mode 100644
index 0000000000..4259b24b53
--- /dev/null
+++ b/test/files/run/t5500b.check
@@ -0,0 +1,28 @@
+C1A$mcLI$sp
+C1A$mcLD$sp
+C1A
+C1A$mcII$sp
+C1A$mcID$sp
+C1A$mcIL$sp
+C1A$mcDI$sp
+C1A$mcDD$sp
+C1A$mcDL$sp
+C1B$mcLI$sp
+C1B$mcLD$sp
+C1B
+C1B$mcII$sp
+C1B$mcID$sp
+C1B$mcIL$sp
+C1B$mcDI$sp
+C1B$mcDD$sp
+C1B$mcDL$sp
+C1C$mcLI$sp
+C1C$mcLI$sp
+C1C$mcLD$sp
+C1C
+C1C$mcII$sp
+C1C$mcID$sp
+C1C$mcIL$sp
+C1C$mcDI$sp
+C1C$mcDD$sp
+C1C$mcDL$sp
diff --git a/test/files/run/t5500b.scala b/test/files/run/t5500b.scala
new file mode 100644
index 0000000000..32de858e7b
--- /dev/null
+++ b/test/files/run/t5500b.scala
@@ -0,0 +1,51 @@
+import scala.{specialized => spec}
+
+class C1A[
+ @spec(Double, Int, AnyRef) A,
+ @spec(Double, Int, AnyRef) B
+]
+
+class C1B[
+ @spec(Double, Int, AnyRef) A,
+ @spec(Double, Int, AnyRef) B
+](v: A)
+
+class C1C[
+ @spec(Double, Int, AnyRef) A,
+ @spec(Double, Int, AnyRef) B
+](v:A, w:B)
+
+object Test {
+ def main(args:Array[String]) {
+ println(new C1A[String, Int].getClass.getName)
+ println(new C1A[String, Double].getClass.getName)
+ println(new C1A[String, String].getClass.getName)
+ println(new C1A[Int, Int].getClass.getName)
+ println(new C1A[Int, Double].getClass.getName)
+ println(new C1A[Int, String].getClass.getName)
+ println(new C1A[Double, Int].getClass.getName)
+ println(new C1A[Double, Double].getClass.getName)
+ println(new C1A[Double, String].getClass.getName)
+
+ println(new C1B[String, Int]("abc").getClass.getName)
+ println(new C1B[String, Double]("abc").getClass.getName)
+ println(new C1B[String, String]("abc").getClass.getName)
+ println(new C1B[Int, Int](1).getClass.getName)
+ println(new C1B[Int, Double](1).getClass.getName)
+ println(new C1B[Int, String](1).getClass.getName)
+ println(new C1B[Double, Int](1d).getClass.getName)
+ println(new C1B[Double, Double](1d).getClass.getName)
+ println(new C1B[Double, String](1d).getClass.getName)
+
+ println(new C1C("abc", 123).getClass.getName)
+ println(new C1C("abc", 123).getClass.getName)
+ println(new C1C("a", 1d).getClass.getName)
+ println(new C1C("a", "a").getClass.getName)
+ println(new C1C(1, 1).getClass.getName)
+ println(new C1C(1, 1d).getClass.getName)
+ println(new C1C(1, "a").getClass.getName)
+ println(new C1C(1d, 1).getClass.getName)
+ println(new C1C(1d, 1d).getClass.getName)
+ println(new C1C(1d, "a").getClass.getName)
+ }
+}
diff --git a/test/files/run/t5527.check b/test/files/run/t5527.check
new file mode 100644
index 0000000000..4a8a9ce602
--- /dev/null
+++ b/test/files/run/t5527.check
@@ -0,0 +1,99 @@
+[[syntax trees at end of parser]]// Scala source: newSource1
+package <empty> {
+ object UselessComments extends scala.ScalaObject {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ var z = 0;
+ def test1 = {
+ object Maybe extends scala.ScalaObject {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ /** Some comment inside */
+ def nothing() = ()
+ };
+ ()
+ };
+ def test2 = {
+ var x = 4;
+ if (true)
+ {
+ x = 5;
+ val y = 6;
+ ()
+ }
+ else
+ ()
+ };
+ def test3 = {
+ if (true)
+ z = 3
+ else
+ ();
+ val t = 4;
+ 0.to(4).foreach(((i) => println(i)))
+ };
+ val test4 = 'a' match {
+ case ('0'| '1'| '2'| '3'| '4'| '5'| '6'| '7'| '8'| '9') => true
+ case _ => false
+ }
+ };
+ /** comments that we should keep */
+ object UsefulComments extends scala.ScalaObject {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ /** class A */
+ class A extends scala.ScalaObject {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ /** f */
+ def f(i: Int) = i;
+ /** v */
+ val v = 1;
+ /** u */
+ var u = 2
+ };
+ /** trait B */
+ abstract trait B extends scala.ScalaObject {
+ def $init$() = {
+ ()
+ };
+ /** T */
+ type T >: _root_.scala.Nothing <: _root_.scala.Any;
+ /** f */
+ def f(i: Int): scala.Unit;
+ /** v */
+ val v = 1;
+ /** u */
+ var u = 2
+ };
+ /** object C */
+ object C extends scala.ScalaObject {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ /** f */
+ def f(i: Int) = i;
+ /** v */
+ val v = 1;
+ /** u */
+ var u = 2
+ };
+ /** class D */
+ @new deprecated("use ... instead", "2.10.0") class D extends scala.ScalaObject {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t5527.scala b/test/files/run/t5527.scala
new file mode 100644
index 0000000000..2449ff60c3
--- /dev/null
+++ b/test/files/run/t5527.scala
@@ -0,0 +1,107 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.doc.{Settings, DocFactory}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:parser -Yrangepos -Ystop-after:parser -d " + testOutput.path
+
+ override def code = """
+ // SI-5527
+ object UselessComments {
+
+ var z = 0
+
+ def test1 = {
+ /** Some comment here */
+ object Maybe {
+ /** Some comment inside */
+ def nothing() = ()
+ }
+ }
+
+ def test2 = {
+ var x = 4
+ if (true) {
+ /** Testing 123 */
+ x = 5
+ val y = 6
+ }
+ }
+
+ def test3 = {
+ if (true)
+ z = 3
+
+ /** Calculate this result. */
+ val t = 4
+ for (i <- 0 to 4)
+ println(i)
+ }
+
+ val test4 = ('a') match {
+ /** Another digit is a giveaway. */
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ true
+ case _ =>
+ false
+ }
+ }
+
+ /** comments that we should keep */
+ object UsefulComments {
+ /** class A */
+ class A {
+ /** f */
+ def f(i: Int) = i
+ /** v */
+ val v = 1
+ /** u */
+ var u = 2
+ }
+ /** trait B */
+ trait B {
+ /** T */
+ type T
+ /** f */
+ def f(i: Int)
+ /** v */
+ val v = 1
+ /** u */
+ var u = 2
+ }
+ /** object C */
+ object C {
+ /** f */
+ def f(i: Int) = i
+ /** v */
+ val v = 1
+ /** u */
+ var u = 2
+ }
+ /** class D */
+ @deprecated("use ... instead", "2.10.0")
+ class D
+ }
+ """.trim
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def newCompiler(args: String*): Global = {
+ // we want the Scaladoc compiler here, because it keeps DocDef nodes in the tree
+ val settings = new Settings(_ => ())
+ val command = new ScalaDoc.Command((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+ new DocFactory(new ConsoleReporter(settings), settings).compiler
+ }
+
+ override def isDebug = false // so we don't get the newSettings warning
+}
diff --git a/test/files/run/t5530.check b/test/files/run/t5530.check
new file mode 100644
index 0000000000..1013e3356f
--- /dev/null
+++ b/test/files/run/t5530.check
@@ -0,0 +1,2 @@
+something like this
+ 7 now works!.
diff --git a/test/files/run/t5530.flags b/test/files/run/t5530.flags
new file mode 100644
index 0000000000..e1b37447c9
--- /dev/null
+++ b/test/files/run/t5530.flags
@@ -0,0 +1 @@
+-Xexperimental \ No newline at end of file
diff --git a/test/files/run/t5530.scala b/test/files/run/t5530.scala
new file mode 100644
index 0000000000..c8109a4004
--- /dev/null
+++ b/test/files/run/t5530.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(s"""something like this
+ ${3+4} now works!.""")
+} \ No newline at end of file
diff --git a/test/files/run/t5532.flags b/test/files/run/t5532.flags
new file mode 100644
index 0000000000..e1b37447c9
--- /dev/null
+++ b/test/files/run/t5532.flags
@@ -0,0 +1 @@
+-Xexperimental \ No newline at end of file
diff --git a/test/files/run/t5532.scala b/test/files/run/t5532.scala
new file mode 100644
index 0000000000..75004730bf
--- /dev/null
+++ b/test/files/run/t5532.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val x = s"1"
+ val y = s"2"
+} \ No newline at end of file
diff --git a/test/files/run/t5537.check b/test/files/run/t5537.check
new file mode 100644
index 0000000000..68c3ebf2e2
--- /dev/null
+++ b/test/files/run/t5537.check
@@ -0,0 +1,20 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> List[Predef.type]()
+res0: List[scala.Predef.type] = List()
+
+scala> List[scala.`package`.type]()
+res1: List[scala.type] = List()
+
+scala> List[List.type]()
+res2: List[scala.collection.immutable.List.type] = List()
+
+scala> List[Set.type]()
+res3: List[Set.type] = List()
+
+scala>
+
+scala>
diff --git a/test/files/run/t5537.scala b/test/files/run/t5537.scala
new file mode 100644
index 0000000000..ae88dcc11f
--- /dev/null
+++ b/test/files/run/t5537.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+List[Predef.type]()
+List[scala.`package`.type]()
+List[List.type]()
+List[Set.type]()
+ """
+}
diff --git a/test/files/run/t5545.check b/test/files/run/t5545.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t5545.check
diff --git a/test/files/run/t5545.scala b/test/files/run/t5545.scala
new file mode 100644
index 0000000000..7efa6d84f1
--- /dev/null
+++ b/test/files/run/t5545.scala
@@ -0,0 +1,27 @@
+import scala.tools.partest._
+import java.io._
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -d " + testOutput.path + " -cp " + testOutput.path
+
+ override def code = """
+ // SI-5545
+ trait F[@specialized(Int) T1, R] {
+ def f(v1: T1): R
+ def g = v1 => f(v1)
+ }
+ """.trim
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ // the bug manifests at the second compilation, when the bytecode is already there
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def isDebug = false // so we don't get the newSettings warning
+}
diff --git a/test/files/run/tailcalls.check b/test/files/run/tailcalls.check
index 7670962db2..f123bc8f25 100644
--- a/test/files/run/tailcalls.check
+++ b/test/files/run/tailcalls.check
@@ -45,8 +45,7 @@ test TailCall.g3 was successful
test TailCall.h1 was successful
test NonTailCall.f1 0 1 2 was successful
-test NonTailCall.f2 was successful
-
+test NonTailCall.f2
test TailCall.b1 was successful
test TailCall.b2 was successful
test FancyTailCalls.tcTryLocal was successful
diff --git a/test/files/run/test-cpp.check b/test/files/run/test-cpp.check
new file mode 100644
index 0000000000..40a976119f
--- /dev/null
+++ b/test/files/run/test-cpp.check
@@ -0,0 +1,73 @@
+37c37
+< locals: value args, value x, value y
+---
+> locals: value args
+42,43d41
+< 52 CONSTANT(2)
+< 52 STORE_LOCAL(value x)
+45,46d42
+< 53 LOAD_LOCAL(value x)
+< 53 STORE_LOCAL(value y)
+49c45
+< 54 LOAD_LOCAL(value y)
+---
+> 54 CONSTANT(2)
+92c88
+< locals: value args, value x, value y
+---
+> locals: value args, value x
+101,102d96
+< 82 LOAD_LOCAL(value x)
+< 82 STORE_LOCAL(value y)
+105c99
+< 83 LOAD_LOCAL(value y)
+---
+> 83 LOAD_LOCAL(value x)
+135c129
+< locals: value args, value x, value y
+---
+> locals: value args
+140,141d133
+< 66 THIS(TestAliasChainDerefThis)
+< 66 STORE_LOCAL(value x)
+143,144d134
+< 67 LOAD_LOCAL(value x)
+< 67 STORE_LOCAL(value y)
+147c137
+< 68 LOAD_LOCAL(value y)
+---
+> 68 THIS(Object)
+176c166
+< locals: value x, value y
+---
+> locals: value x
+181,182d170
+< 29 LOAD_LOCAL(value x)
+< 29 STORE_LOCAL(value y)
+185c173
+< 30 LOAD_LOCAL(value y)
+---
+> 30 LOAD_LOCAL(value x)
+223,224d210
+< 97 LOAD_LOCAL(variable x)
+< 97 STORE_LOCAL(variable y)
+227c213
+< 98 LOAD_LOCAL(variable y)
+---
+> 98 LOAD_LOCAL(variable x)
+233,234d218
+< 101 LOAD_LOCAL(variable y)
+< 101 STORE_LOCAL(variable x)
+236c220
+< 102 LOAD_LOCAL(variable x)
+---
+> 102 LOAD_LOCAL(variable y)
+345c329
+< 41 THIS(TestSetterInline)
+---
+> 41 THIS(Object)
+347c331
+< 41 CALL_METHOD TestSetterInline._postSetHook_$eq (static-instance)
+---
+> 41 STORE_FIELD variable _postSetHook (dynamic)
+
diff --git a/test/files/run/test-cpp.scala b/test/files/run/test-cpp.scala
new file mode 100644
index 0000000000..5b3bc7b746
--- /dev/null
+++ b/test/files/run/test-cpp.scala
@@ -0,0 +1,104 @@
+/**
+ * The only change is in the decision of what to replace a LOAD_LOCAL(l)
+ * with, in the copy-propagation performed before ClosureElimination.
+ *
+ * In the general case, the local variable 'l' is connected through
+ * an alias chain with other local variables, and at the end of the
+ * alias chain there may be a Value, call it 'v'.
+ *
+ * If 'v' is cheaper to access (it is a Deref(This) or Const(_)), then
+ * the load is replaced with one that reads 'v' from the cheaper place.
+ * Otherwise, we use the local variable at the end of the alias chain
+ * instead of 'l'.
+ */
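+
+// Illustrative walk-through (a comment added for exposition, not part of
+// the original patch): for
+//   val x = 2        // x aliases Const(2)
+//   val y = x        // y aliases x, hence Const(2)
+//   println(y)       // LOAD_LOCAL(y) becomes CONSTANT(2)
+// copy-propagation follows the chain y -> x -> Const(2); since a constant
+// is cheaper to access than a local, the load is rewritten and both locals
+// can be discarded.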
+
+import scala.tools.partest.IcodeTest
+
+object Test extends IcodeTest {
+ override def printIcodeAfterPhase = "dce"
+}
+
+import scala.util.Random._
+
+/**
+ * The example in the bug report (SI-5321): an alias chain which stores
+ * an Unknown. Local variable 'y' should be removed.
+ */
+object TestBugReport {
+ def test(x: Int) = {
+ val y = x
+ println(y)
+ }
+}
+
+/**
+ * The code is taken from scala.tools.nsc.settings.Settings:
+ * after the setter has been inlined, there is an opportunity for
+ * copy-propagation to eliminate some local variables.
+ */
+object TestSetterInline {
+ private var _postSetHook: this.type => Unit = (x: this.type) => ()
+ def withPostSetHook(f: this.type => Unit): this.type = { _postSetHook = f ; this }
+}
+
+
+/**
+ * The access of the local variable 'y' should be replaced by the
+ * constant.
+ */
+object TestAliasChainConstat {
+
+ def main(args: Array[String]): Unit = {
+ val x = 2
+ val y = x
+ println(y)
+ }
+}
+
+/**
+ * At the end of the alias chain we have a reference to 'this'.
+ * The local variables should all be discarded and replaced by a
+ * direct reference to 'this'.
+ */
+class TestAliasChainDerefThis {
+
+ def main(args: Array[String]): Unit = {
+ val x = this
+ val y = x
+ println(y)
+ }
+}
+
+/**
+ * At the end of the alias chain, there is the value of a field.
+ * The use of variable 'y' should be replaced by 'x', not by an access
+ * to the field 'f', since re-reading the field would be more costly.
+ */
+object TestAliasChainDerefField {
+ def f = nextInt
+
+ def main(args: Array[String]): Unit = {
+ val x = f
+ val y = x
+ println(y)
+ }
+}
+
+
+/**
+ * The first time 'println' is called, the use of 'y' is replaced by 'x',
+ * and the second time, the use of 'x' is replaced by 'y'; but neither
+ * variable can be removed.
+ */
+object TestDifferentBindings {
+
+ def main(args: Array[String]): Unit = {
+ var x = nextInt
+ var y = x
+ println(y)
+
+ y = nextInt
+ x = y
+ println(x)
+ }
+}
diff --git a/test/files/run/virtpatmat_partial.check b/test/files/run/virtpatmat_partial.check
index 093020ce05..1555eca82b 100644
--- a/test/files/run/virtpatmat_partial.check
+++ b/test/files/run/virtpatmat_partial.check
@@ -1,2 +1,4 @@
Map(a -> Some(1), b -> None)
-Map(a -> 1) \ No newline at end of file
+79
+undefined
+Map(a -> 1)
diff --git a/test/files/run/virtpatmat_partial.scala b/test/files/run/virtpatmat_partial.scala
index c408b31983..6597f2f5ae 100644
--- a/test/files/run/virtpatmat_partial.scala
+++ b/test/files/run/virtpatmat_partial.scala
@@ -4,6 +4,29 @@ object Test extends App {
val res = a collect {case (p, Some(a)) => (p, a)}
+ final val GT = 79
+ final val GTGT = 93
+ final val GTGTGT = 94
+ final val GTEQ = 81
+ final val GTGTEQ = 113
+ final val GTGTGTEQ = 114
+ final val ASSIGN = 75
+
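+ // Descriptive note (not part of the original patch): each mapping below
+ // peels one '>' off a closing-angle token: GTGT (">>") leaves GT (">"),
+ // GTEQ (">=") leaves ASSIGN ("="); a token with no '>' left to peel,
+ // such as ASSIGN itself, falls outside the partial function ("undefined").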
+ def acceptClosingAngle(in: Int) {
+ val closers: PartialFunction[Int, Int] = {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+
// should uncurry to:
// val res: Map[String,Int] = a.collect[(String, Int), Map[String,Int]](
// new PartialFunction[(String, Option[Int]),(String, Int)] {
diff --git a/test/files/run/virtpatmat_switch.scala b/test/files/run/virtpatmat_switch.scala
index 2e2c31e8e5..1329c19d0f 100644
--- a/test/files/run/virtpatmat_switch.scala
+++ b/test/files/run/virtpatmat_switch.scala
@@ -14,9 +14,15 @@ object Test extends App {
case 'b' => "got b"
case _ => "got some letter"
}
+
+ def byteSwitch(x: Byte) = x match {
+ case 'a' => "got a"
+ case 'b' => "got b"
+ case _ => "got some letter"
+ }
println(charSwitch('a'))
- println(charSwitch('b'))
+ println(byteSwitch('b'))
println(charSwitch('z'))
def implicitDefault(x: Int) = x match {
diff --git a/test/files/run/virtpatmat_try.check b/test/files/run/virtpatmat_try.check
new file mode 100644
index 0000000000..80ebbf494a
--- /dev/null
+++ b/test/files/run/virtpatmat_try.check
@@ -0,0 +1,2 @@
+meh
+B
diff --git a/test/files/run/virtpatmat_try.flags b/test/files/run/virtpatmat_try.flags
new file mode 100644
index 0000000000..9769db9257
--- /dev/null
+++ b/test/files/run/virtpatmat_try.flags
@@ -0,0 +1 @@
+ -Yvirtpatmat -Xexperimental
diff --git a/test/files/run/virtpatmat_try.scala b/test/files/run/virtpatmat_try.scala
new file mode 100644
index 0000000000..46e67cb72e
--- /dev/null
+++ b/test/files/run/virtpatmat_try.scala
@@ -0,0 +1,47 @@
+object Test extends App {
+ case class A(val x: String) extends Throwable
+ class B extends Exception { override def toString = "B" }
+ def bla = 0
+
+ try {
+ throw new A("meh")
+ } catch { // this should emit a "catch-switch"
+ case y: A => println(y.x)
+ case (_ : A | _ : B) => println("B")
+ case _ => println("other")
+ }
+
+ try {
+ throw new B()
+ } catch { // case classes and alternative flattening aren't supported yet, but could be in principle
+ // case A(x) => println(x)
+ case y: A => println(y.x)
+ case x@((_ : A) | (_ : B)) => println(x)
+ case _ => println("other")
+ }
+
+ def simpleTry {
+ try {
+ bla
+ } catch {
+ case x: Exception if x.getMessage == "test" => println("first case " + x)
+ case x: Exception => println("second case " + x)
+ }
+ }
+
+ def typedWildcardTry {
+ try { bla } catch { case _: ClassCastException => bla }
+ }
+
+ def wildcardTry {
+ try { bla } catch { case _ => bla }
+ }
+
+ def tryPlusFinally {
+ try { bla } finally { println("finally") }
+ }
+
+ def catchAndPassToLambda {
+ try { bla } catch { case ex: Exception => val f = () => ex }
+ }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/Ctrie.scala b/test/files/scalacheck/Ctrie.scala
new file mode 100644
index 0000000000..2950937278
--- /dev/null
+++ b/test/files/scalacheck/Ctrie.scala
@@ -0,0 +1,199 @@
+
+
+
+import org.scalacheck._
+import Prop._
+import org.scalacheck.Gen._
+import collection._
+import collection.mutable.Ctrie
+
+
+
+case class Wrap(i: Int) {
+ override def hashCode = i // * 0x9e3775cd
+}
+
+
+/** A test suite oriented mainly towards checking snapshot correctness.
+ */
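+
+// What "snapshot correctness" means here (descriptive comment, not part of
+// the original patch): a readOnlySnapshot taken while writer threads are
+// inserting must be a consistent point-in-time view, so successive
+// snapshots may only grow -- hasGrown below checks that every binding of
+// the previous snapshot survives and that the size never shrinks.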
+object Test extends Properties("Ctrie") {
+
+ /* generators */
+
+ val sizes = choose(0, 200000)
+
+ val threadCounts = choose(2, 16)
+
+ val threadCountsAndSizes = for {
+ p <- threadCounts
+ sz <- sizes
+ } yield (p, sz);
+
+
+ /* helpers */
+
+ def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = {
+ val threads = for (idx <- 0 until totalThreads) yield new Thread {
+ setName("ParThread-" + idx)
+ private var res: T = _
+ override def run() {
+ res = body(idx)
+ }
+ def result = {
+ this.join()
+ res
+ }
+ }
+
+ threads foreach (_.start())
+ threads map (_.result)
+ }
+
+ def spawn[T](body: =>T): { def get: T } = {
+ val t = new Thread {
+ setName("SpawnThread")
+ private var res: T = _
+ override def run() {
+ res = body
+ }
+ def result = res
+ }
+ t.start()
+ new {
+ def get: T = {
+ t.join()
+ t.result
+ }
+ }
+ }
+
+ def elementRange(threadIdx: Int, totalThreads: Int, totalElems: Int): Range = {
+ val sz = totalElems
+ val idx = threadIdx
+ val p = totalThreads
+ val start = (sz / p) * idx + math.min(idx, sz % p)
+ val elems = (sz / p) + (if (idx < sz % p) 1 else 0)
+ val end = start + elems
+ (start until end)
+ }
+
+ def hasGrown[K, V](last: Map[K, V], current: Map[K, V]) = {
+ (last.size <= current.size) && {
+ last forall {
+ case (k, v) => current.get(k) == Some(v)
+ }
+ }
+ }
+
+ object err {
+ var buffer = new StringBuilder
+ def println(a: AnyRef) = buffer.append(a.toString).append("\n")
+ def clear() = buffer.clear()
+ def flush() = {
+ Console.out.println(buffer)
+ clear()
+ }
+ }
+
+
+ /* properties */
+
+ property("concurrent growing snapshots") = forAll(threadCounts, sizes) {
+ (numThreads, numElems) =>
+ val p = 3 //numThreads
+ val sz = 102 //numElems
+ val ct = new Ctrie[Wrap, Int]
+
+ // checker
+ val checker = spawn {
+ def check(last: Map[Wrap, Int], iterationsLeft: Int): Boolean = {
+ val current = ct.readOnlySnapshot()
+ if (!hasGrown(last, current)) false
+ else if (current.size >= sz) true
+ else if (iterationsLeft < 0) false
+ else check(current, iterationsLeft - 1)
+ }
+ check(ct.readOnlySnapshot(), 500)
+ }
+
+ // fillers
+ inParallel(p) {
+ idx =>
+ elementRange(idx, p, sz) foreach (i => ct.update(Wrap(i), i))
+ }
+
+ // wait for checker to finish
+ val growing = true//checker.get
+
+ val ok = growing && ((0 until sz) forall {
+ case i => ct.get(Wrap(i)) == Some(i)
+ })
+
+ ok
+ }
+
+ property("update") = forAll(sizes) {
+ (n: Int) =>
+ val ct = new Ctrie[Int, Int]
+ for (i <- 0 until n) ct(i) = i
+ (0 until n) forall {
+ case i => ct(i) == i
+ }
+ }
+
+ property("concurrent update") = forAll(threadCountsAndSizes) {
+ case (p, sz) =>
+ val ct = new Ctrie[Wrap, Int]
+
+ inParallel(p) {
+ idx =>
+ for (i <- elementRange(idx, p, sz)) ct(Wrap(i)) = i
+ }
+
+ (0 until sz) forall {
+ case i => ct(Wrap(i)) == i
+ }
+ }
+
+
+ property("concurrent remove") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val ct = new Ctrie[Wrap, Int]
+ for (i <- 0 until sz) ct(Wrap(i)) = i
+
+ inParallel(p) {
+ idx =>
+ for (i <- elementRange(idx, p, sz)) ct.remove(Wrap(i))
+ }
+
+ (0 until sz) forall {
+ case i => ct.get(Wrap(i)) == None
+ }
+ }
+
+
+ property("concurrent putIfAbsent") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val ct = new Ctrie[Wrap, Int]
+
+ val results = inParallel(p) {
+ idx =>
+ elementRange(idx, p, sz) find (i => ct.putIfAbsent(Wrap(i), i) != None)
+ }
+
+ (results forall (_ == None)) && ((0 until sz) forall {
+ case i => ct.get(Wrap(i)) == Some(i)
+ })
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/scalacheck/avl.scala b/test/files/scalacheck/avl.scala
index 51fb1fe8c3..af79ad49e3 100644
--- a/test/files/scalacheck/avl.scala
+++ b/test/files/scalacheck/avl.scala
@@ -47,21 +47,21 @@ package scala.collection.mutable {
}
}
- def genInput: Gen[(Int, List[AVLTree[Int]])] = for {
- size <- Gen.choose(20, 25)
- elements <- Gen.listOfN(size, Gen.choose(0, 1000))
- selected <- Gen.choose(0, 1000)
+ def genInput: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for {
+ size <- org.scalacheck.Gen.choose(20, 25)
+ elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000))
+ selected <- org.scalacheck.Gen.choose(0, 1000)
} yield {
// selected mustn't be in elements already
val list = makeAllBalancedTree(elements.sorted.distinct.map(_*2))
(selected*2+1, list)
}
- def genInputDelete: Gen[(Int, List[AVLTree[Int]])] = for {
- size <- Gen.choose(20, 25)
- elements <- Gen.listOfN(size, Gen.choose(0, 1000))
+ def genInputDelete: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for {
+ size <- org.scalacheck.Gen.choose(20, 25)
+ elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000))
e = elements.sorted.distinct
- selected <- Gen.choose(0, e.size-1)
+ selected <- org.scalacheck.Gen.choose(0, e.size-1)
} yield {
// selected must be in elements already
val list = makeAllBalancedTree(e)
@@ -111,4 +111,4 @@ package scala.collection.mutable {
object Test extends Properties("AVL") {
include(scala.collection.mutable.TestInsert)
include(scala.collection.mutable.TestRemove)
-} \ No newline at end of file
+}
diff --git a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
new file mode 100644
index 0000000000..d1924f0ada
--- /dev/null
+++ b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
@@ -0,0 +1,98 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+
+abstract class ParallelCtrieCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParCtrie[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParCtrie[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ val ct = new mutable.Ctrie[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) ct += sample(gen)
+ ct
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ val pct = new ParCtrie[K, V]
+ var i = 0
+ for (kv <- t.toList) {
+ pct += kv
+ i += 1
+ }
+ pct
+ }
+
+}
+
+
+object IntIntParallelCtrieCheck extends ParallelCtrieCheck[Int, Int]("Int, Int")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParCtrie[k, v] =>
+ println("Mutable parallel ctrie")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for ((k, v) <- orig) yield {
+ // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ // else {
+ // println("Does not contain original element: " + (k, v))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) containsall
+ // else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
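fromTraversable above populates the parallel Ctrie with sequential += calls before the generic map checks take over. A usage sketch under the same assumption that ParCtrie is available (it was later renamed ParTrieMap):

    // a sketch, assuming scala.collection.parallel.mutable.ParCtrie as in this commit
    val pct = new scala.collection.parallel.mutable.ParCtrie[Int, Int]
    for (kv <- List(1 -> 1, 2 -> 4, 3 -> 9)) pct += kv
    assert(pct.get(2) == Some(4))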
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index 8273e302a2..e3f8778bca 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -86,7 +86,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
println("Collection debug info: ")
coll.printDebugBuffer
println("Task debug info: ")
- println(tasksupport.debugMessages.mkString("\n"))
+ println(coll.tasksupport.debugMessages.mkString("\n"))
}
def printComparison(t: Traversable[_], coll: ParIterable[_], tf: Traversable[_], cf: ParIterable[_], ind: Int) {
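The one-line fix above tracks tasksupport moving from a free-standing reference to a member of the collection under test, so the debug messages are read off that collection. A minimal sketch of the member, with the concrete collection chosen only for illustration:

    // hypothetical: each parallel collection exposes the task support it schedules with
    val pa = scala.collection.parallel.mutable.ParArray(1, 2, 3)
    println(pa.tasksupport)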
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index cc0382303a..8a0dba3c25 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -25,6 +25,9 @@ class ParCollProperties extends Properties("Parallel collections") {
// parallel mutable hash maps (tables)
include(mutable.IntIntParallelHashMapCheck)
+ // parallel ctrie
+ include(mutable.IntIntParallelCtrieCheck)
+
// parallel mutable hash sets (tables)
include(mutable.IntParallelHashSetCheck)
diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala
index 1fcaa46f0e..bbc6504f58 100644
--- a/test/files/scalacheck/redblack.scala
+++ b/test/files/scalacheck/redblack.scala
@@ -7,7 +7,7 @@ Properties of a Red & Black Tree:
A node is either red or black.
The root is black. (This rule is used in some definitions and not others. Since the
-root can always be changed from red to black but not necessarily vice-versa this
+root can always be changed from red to black but not necessarily vice-versa this
rule has little effect on analysis.)
All leaves are black.
Both children of every red node are black.
@@ -21,17 +21,17 @@ abstract class RedBlackTest extends Properties("RedBlack") {
object RedBlackTest extends scala.collection.immutable.RedBlack[String] {
def isSmaller(x: String, y: String) = x < y
}
-
+
import RedBlackTest._
-
+
def nodeAt[A](tree: Tree[A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0)
Some(tree.iterator.drop(n).next)
else
None
-
+
def treeContains[A](tree: Tree[A], key: String) = tree.iterator.map(_._1) contains key
-
- def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[Int]] =
+
+ def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[Int]] =
if (level == 0) {
value(Empty)
} else {
@@ -43,7 +43,7 @@ abstract class RedBlackTest extends Properties("RedBlack") {
left <- mkTree(nextLevel, !isRed, label + "L")
right <- mkTree(nextLevel, !isRed, label + "R")
} yield {
- if (isRed)
+ if (isRed)
RedTree(label + "N", 0, left, right)
else
BlackTree(label + "N", 0, left, right)
@@ -54,11 +54,11 @@ abstract class RedBlackTest extends Properties("RedBlack") {
depth <- choose(minimumSize, maximumSize + 1)
tree <- mkTree(depth)
} yield tree
-
+
type ModifyParm
def genParm(tree: Tree[Int]): Gen[ModifyParm]
def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int]
-
+
def genInput: Gen[(Tree[Int], ModifyParm, Tree[Int])] = for {
tree <- genTree
parm <- genParm(tree)
@@ -67,41 +67,41 @@ abstract class RedBlackTest extends Properties("RedBlack") {
trait RedBlackInvariants {
self: RedBlackTest =>
-
+
import RedBlackTest._
-
+
def rootIsBlack[A](t: Tree[A]) = t.isBlack
-
+
def areAllLeavesBlack[A](t: Tree[A]): Boolean = t match {
case Empty => t.isBlack
case ne: NonEmpty[_] => List(ne.left, ne.right) forall areAllLeavesBlack
}
-
+
def areRedNodeChildrenBlack[A](t: Tree[A]): Boolean = t match {
- case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t))
+ case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t))
case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
case Empty => true
}
-
+
def blackNodesToLeaves[A](t: Tree[A]): List[Int] = t match {
case Empty => List(1)
case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
}
-
+
def areBlackNodesToLeavesEqual[A](t: Tree[A]): Boolean = t match {
case Empty => true
- case ne: NonEmpty[_] =>
+ case ne: NonEmpty[_] =>
(
- blackNodesToLeaves(ne).distinct.size == 1
- && areBlackNodesToLeavesEqual(ne.left)
+ blackNodesToLeaves(ne).distinct.size == 1
+ && areBlackNodesToLeavesEqual(ne.left)
&& areBlackNodesToLeavesEqual(ne.right)
)
}
-
- def orderIsPreserved[A](t: Tree[A]): Boolean =
+
+ def orderIsPreserved[A](t: Tree[A]): Boolean =
t.iterator zip t.iterator.drop(1) forall { case (x, y) => isSmaller(x._1, y._1) }
-
+
def setup(invariant: Tree[Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) =>
invariant(newTree)
}
@@ -115,7 +115,7 @@ trait RedBlackInvariants {
object TestInsert extends RedBlackTest with RedBlackInvariants {
import RedBlackTest._
-
+
override type ModifyParm = Int
override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1)
override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = tree update (generateKey(tree, parm), 0)
@@ -135,12 +135,12 @@ object TestInsert extends RedBlackTest with RedBlackInvariants {
object TestModify extends RedBlackTest {
import RedBlackTest._
-
+
def newValue = 1
override def minimumSize = 1
override type ModifyParm = Int
override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size)
- override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
+ override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
case (key, _) => tree update (key, newValue)
} getOrElse tree
@@ -157,10 +157,10 @@ object TestDelete extends RedBlackTest with RedBlackInvariants {
override def minimumSize = 1
override type ModifyParm = Int
override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size)
- override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
+ override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
case (key, _) => tree delete key
} getOrElse tree
-
+
property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) =>
nodeAt(tree, parm) forall { case (key, _) =>
!treeContains(newTree, key)
@@ -170,7 +170,7 @@ object TestDelete extends RedBlackTest with RedBlackInvariants {
object TestRange extends RedBlackTest with RedBlackInvariants {
import RedBlackTest._
-
+
override type ModifyParm = (Option[Int], Option[Int])
override def genParm(tree: Tree[Int]): Gen[ModifyParm] = for {
from <- choose(0, tree.iterator.size)
@@ -178,25 +178,25 @@ object TestRange extends RedBlackTest with RedBlackInvariants {
optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug
optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug
} yield (optionalFrom, optionalTo)
-
+
override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = {
val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
tree range (from, to)
}
-
+
property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) =>
val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
("lower boundary" |: (from forall ( key => newTree.iterator.map(_._1) forall (key <=)))) &&
("upper boundary" |: (to forall ( key => newTree.iterator.map(_._1) forall (key >))))
}
-
+
property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) =>
val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
val filteredTree = (tree.iterator
- .map(_._1)
+ .map(_._1)
.filter(key => from forall (key >=))
.filter(key => to forall (key <))
.toList)
diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala
new file mode 100644
index 0000000000..e4b356c889
--- /dev/null
+++ b/test/files/scalacheck/redblacktree.scala
@@ -0,0 +1,216 @@
+import collection.immutable.{RedBlackTree => RB}
+import org.scalacheck._
+import Prop._
+import Gen._
+
+/*
+Properties of a Red & Black Tree:
+
+A node is either red or black.
+The root is black. (This rule is used in some definitions and not others. Since the
+root can always be changed from red to black but not necessarily vice-versa, this
+rule has little effect on analysis.)
+All leaves are black.
+Both children of every red node are black.
+Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
+*/
+
+package scala.collection.immutable.redblacktree {
+ abstract class RedBlackTreeTest extends Properties("RedBlackTree") {
+ def minimumSize = 0
+ def maximumSize = 5
+
+ import RB._
+
+ def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0)
+ Some(iterator(tree).drop(n).next)
+ else
+ None
+
+ def treeContains[A](tree: Tree[String, A], key: String) = iterator(tree).map(_._1) contains key
+
+ def height(tree: Tree[_, _]): Int = if (tree eq null) 0 else (1 + math.max(height(tree.left), height(tree.right)))
+
+ def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] =
+ if (level == 0) {
+ value(null)
+ } else {
+ for {
+ oddOrEven <- choose(0, 2)
+ tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug
+ isRed = parentIsBlack && tryRed
+ nextLevel = if (isRed) level else level - 1
+ left <- mkTree(nextLevel, !isRed, label + "L")
+ right <- mkTree(nextLevel, !isRed, label + "R")
+ } yield {
+ if (isRed)
+ RedTree(label + "N", 0, left, right)
+ else
+ BlackTree(label + "N", 0, left, right)
+ }
+ }
+
+ def genTree = for {
+ depth <- choose(minimumSize, maximumSize + 1)
+ tree <- mkTree(depth)
+ } yield tree
+
+ type ModifyParm
+ def genParm(tree: Tree[String, Int]): Gen[ModifyParm]
+ def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int]
+
+ def genInput: Gen[(Tree[String, Int], ModifyParm, Tree[String, Int])] = for {
+ tree <- genTree
+ parm <- genParm(tree)
+ } yield (tree, parm, modify(tree, parm))
+ }
+
+ trait RedBlackTreeInvariants {
+ self: RedBlackTreeTest =>
+
+ import RB._
+
+ def rootIsBlack[A](t: Tree[String, A]) = isBlack(t)
+
+ def areAllLeavesBlack[A](t: Tree[String, A]): Boolean = t match {
+ case null => isBlack(t)
+ case ne => List(ne.left, ne.right) forall areAllLeavesBlack
+ }
+
+ def areRedNodeChildrenBlack[A](t: Tree[String, A]): Boolean = t match {
+ case RedTree(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t))
+ case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
+ case null => true
+ }
+
+ def blackNodesToLeaves[A](t: Tree[String, A]): List[Int] = t match {
+ case null => List(1)
+ case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
+ case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
+ }
+
+ def areBlackNodesToLeavesEqual[A](t: Tree[String, A]): Boolean = t match {
+ case null => true
+ case ne =>
+ (
+ blackNodesToLeaves(ne).distinct.size == 1
+ && areBlackNodesToLeavesEqual(ne.left)
+ && areBlackNodesToLeavesEqual(ne.right)
+ )
+ }
+
+ def orderIsPreserved[A](t: Tree[String, A]): Boolean =
+ iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 }
+
+ def heightIsBounded(t: Tree[_, _]): Boolean = height(t) <= (2 * (32 - Integer.numberOfLeadingZeros(count(t) + 2)) - 2)
+
+ def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) =>
+ invariant(newTree)
+ }
+
+ property("root is black") = setup(rootIsBlack)
+ property("all leaves are black") = setup(areAllLeavesBlack)
+ property("children of red nodes are black") = setup(areRedNodeChildrenBlack)
+ property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual)
+ property("ordering of keys is preserved") = setup(orderIsPreserved)
+ property("height is bounded") = setup(heightIsBounded)
+ }
+
+ object TestInsert extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0)
+
+ def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
+ case Some((key, _)) => key.init.mkString + "MN"
+ case None => nodeAt(tree, parm - 1) match {
+ case Some((key, _)) => key.init.mkString + "RN"
+ case None => "N"
+ }
+ }
+
+ property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ treeContains(newTree, generateKey(tree, parm))
+ }
+ }
+
+ object TestModify extends RedBlackTreeTest {
+ import RB._
+
+ def newValue = 1
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
+ case (key, _) => update(tree, key, newValue)
+ } getOrElse tree
+
+ property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree,parm) forall { case (key, _) =>
+ iterator(newTree) contains (key, newValue)
+ }
+ }
+ }
+
+ object TestDelete extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
+ case (key, _) => delete(tree, key)
+ } getOrElse tree
+
+ property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree, parm) forall { case (key, _) =>
+ !treeContains(newTree, key)
+ }
+ }
+ }
+
+ object TestRange extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = (Option[Int], Option[Int])
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
+ from <- choose(0, iterator(tree).size)
+ to <- choose(0, iterator(tree).size) suchThat (from <=)
+ optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug
+ optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug
+ } yield (optionalFrom, optionalTo)
+
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = {
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ rangeImpl(tree, from, to)
+ }
+
+ property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ ("lower boundary" |: (from forall ( key => keysIterator(newTree) forall (key <=)))) &&
+ ("upper boundary" |: (to forall ( key => keysIterator(newTree) forall (key >))))
+ }
+
+ property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ val filteredTree = (keysIterator(tree)
+ .filter(key => from forall (key >=))
+ .filter(key => to forall (key <))
+ .toList)
+ filteredTree == keysIterator(newTree).toList
+ }
+ }
+}
+
+object Test extends Properties("RedBlackTree") {
+ import collection.immutable.redblacktree._
+ include(TestInsert)
+ include(TestModify)
+ include(TestDelete)
+ include(TestRange)
+}
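heightIsBounded above hides the classic red-black height bound in bit twiddling: for x > 0, 32 - Integer.numberOfLeadingZeros(x) equals floor(log2(x)) + 1, so the invariant asserts height(t) <= 2 * floor(log2(count(t) + 2)). A quick standalone check of that identity:

    // floor(log2(x)) + 1 via leading zeros, for x > 0
    def log2p1(x: Int): Int = 32 - Integer.numberOfLeadingZeros(x)

    assert(log2p1(1) == 1)
    assert(log2p1(2) == 2 && log2p1(3) == 2)
    assert(log2p1(4) == 3)
    // hence: 2 * log2p1(count + 2) - 2 == 2 * floor(log2(count + 2))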
diff --git a/test/files/scalacheck/treemap.scala b/test/files/scalacheck/treemap.scala
new file mode 100644
index 0000000000..f672637c57
--- /dev/null
+++ b/test/files/scalacheck/treemap.scala
@@ -0,0 +1,154 @@
+import collection.immutable._
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+import Buildable._
+
+object Test extends Properties("TreeMap") {
+ def genTreeMap[A: Arbitrary: Ordering, B: Arbitrary]: Gen[TreeMap[A, B]] =
+ for {
+ keys <- listOf(arbitrary[A])
+ values <- listOfN(keys.size, arbitrary[B])
+ } yield TreeMap(keys zip values: _*)
+ implicit def arbTreeMap[A : Arbitrary : Ordering, B : Arbitrary] = Arbitrary(genTreeMap[A, B])
+
+ property("foreach/iterator consistency") = forAll { (subject: TreeMap[Int, String]) =>
+ val it = subject.iterator
+ var consistent = true
+ subject.foreach { element =>
+ consistent &&= it.hasNext && element == it.next
+ }
+ consistent
+ }
+
+ property("worst-case tree height is iterable") = forAll(choose(0, 10), arbitrary[Boolean]) { (n: Int, even: Boolean) =>
+ /*
+ * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
+ * you can construct a skinny tree of height 2n by inserting the elements [1 .. 2^(n+1) - 2] and a tree of height
+ * 2n+1 by inserting the elements [1 .. 3 * 2^n - 2], both in reverse order.
+ *
+ * Since we allocate a fixed size buffer in the iterator (based on the tree size) we need to ensure
+ * it is big enough for these worst-case trees.
+ */
+ val highest = if (even) (1 << (n+1)) - 2 else 3*(1 << n) - 2
+ val values = (1 to highest).reverse
+ val subject = TreeMap(values zip values: _*)
+ val it = subject.iterator
+ try { while (it.hasNext) it.next; true } catch { case _ => false }
+ }
+
+ property("sorted") = forAll { (subject: TreeMap[Int, String]) => (subject.size >= 3) ==> {
+ subject.zip(subject.tail).forall { case (x, y) => x._1 < y._1 }
+ }}
+
+ property("contains all") = forAll { (arr: List[(Int, String)]) =>
+ val subject = TreeMap(arr: _*)
+ arr.map(_._1).forall(subject.contains(_))
+ }
+
+ property("size") = forAll { (elements: List[(Int, Int)]) =>
+ val subject = TreeMap(elements: _*)
+ elements.map(_._1).distinct.size == subject.size
+ }
+
+ property("toSeq") = forAll { (elements: List[(Int, Int)]) =>
+ val subject = TreeMap(elements: _*)
+ elements.map(_._1).distinct.sorted == subject.toSeq.map(_._1)
+ }
+
+ property("head") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeMap(elements zip elements: _*)
+ elements.min == subject.head._1
+ }}
+
+ property("last") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeMap(elements zip elements: _*)
+ elements.max == subject.last._1
+ }}
+
+ property("head/tail identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ subject == (subject.tail + subject.head)
+ }}
+
+ property("init/last identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ subject == (subject.init + subject.last)
+ }}
+
+ property("take") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(0, subject.size).sample.get
+ n == subject.take(n).size && subject.take(n).forall(elt => subject.get(elt._1) == Some(elt._2))
+ }
+
+ property("drop") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(0, subject.size).sample.get
+ (subject.size - n) == subject.drop(n).size && subject.drop(n).forall(elt => subject.get(elt._1) == Some(elt._2))
+ }
+
+ property("take/drop identity") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ subject == subject.take(n) ++ subject.drop(n)
+ }
+
+ property("splitAt") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ val (prefix, suffix) = subject.splitAt(n)
+ prefix == subject.take(n) && suffix == subject.drop(n)
+ }
+
+ def genSliceParms = for {
+ tree <- genTreeMap[Int, String]
+ from <- choose(0, tree.size)
+ until <- choose(from, tree.size)
+ } yield (tree, from, until)
+
+ property("slice") = forAll(genSliceParms) { case (subject, from, until) =>
+ val slice = subject.slice(from, until)
+ slice.size == until - from && subject.toSeq == subject.take(from).toSeq ++ slice ++ subject.drop(until)
+ }
+
+ property("takeWhile") = forAll { (subject: TreeMap[Int, String]) =>
+ val result = subject.takeWhile(_._1 < 0)
+ result.forall(_._1 < 0) && result == subject.take(result.size)
+ }
+
+ property("dropWhile") = forAll { (subject: TreeMap[Int, String]) =>
+ val result = subject.dropWhile(_._1 < 0)
+ result.forall(_._1 >= 0) && result == subject.takeRight(result.size)
+ }
+
+ property("span identity") = forAll { (subject: TreeMap[Int, String]) =>
+ val (prefix, suffix) = subject.span(_._1 < 0)
+ prefix.forall(_._1 < 0) && suffix.forall(_._1 >= 0) && subject == prefix ++ suffix
+ }
+
+ property("from is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val from = subject.drop(n).firstKey
+ subject.from(from).firstKey == from && subject.from(from).forall(_._1 >= from)
+ }}
+
+ property("to is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val to = subject.drop(n).firstKey
+ subject.to(to).lastKey == to && subject.to(to).forall(_._1 <= to)
+ }}
+
+ property("until is exclusive") = forAll { (subject: TreeMap[Int, String]) => subject.size > 1 ==> {
+ val n = choose(1, subject.size - 1).sample.get
+ val until = subject.drop(n).firstKey
+ subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_._1 <= until)
+ }}
+
+ property("remove single") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ val key = oneOf(subject.keys.toSeq).sample.get
+ val removed = subject - key
+ subject.contains(key) && !removed.contains(key) && subject.size - 1 == removed.size
+ }}
+
+ property("remove all") = forAll { (subject: TreeMap[Int, String]) =>
+ val result = subject.foldLeft(subject)((acc, elt) => acc - elt._1)
+ result.isEmpty
+ }
+}
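The "worst-case tree height is iterable" property relies on the sizes quoted from Hinze's construction in its comment. That arithmetic in isolation, with the heights as claimed there:

    // sizes of Hinze's skinny trees: height 2n from [1 .. 2^(n+1) - 2],
    // height 2n+1 from [1 .. 3 * 2^n - 2], both inserted in reverse order
    def skinnySize(n: Int, even: Boolean): Int =
      if (even) (1 << (n + 1)) - 2 else 3 * (1 << n) - 2

    assert(skinnySize(3, even = true)  == 14)  // claimed height 6
    assert(skinnySize(3, even = false) == 22)  // claimed height 7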
diff --git a/test/files/scalacheck/treeset.scala b/test/files/scalacheck/treeset.scala
new file mode 100644
index 0000000000..98e38c8219
--- /dev/null
+++ b/test/files/scalacheck/treeset.scala
@@ -0,0 +1,152 @@
+import collection.immutable._
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+
+object Test extends Properties("TreeSet") {
+ def genTreeSet[A: Arbitrary: Ordering]: Gen[TreeSet[A]] =
+ for {
+ elements <- listOf(arbitrary[A])
+ } yield TreeSet(elements: _*)
+ implicit def arbTreeSet[A : Arbitrary : Ordering]: Arbitrary[TreeSet[A]] = Arbitrary(genTreeSet)
+
+ property("foreach/iterator consistency") = forAll { (subject: TreeSet[Int]) =>
+ val it = subject.iterator
+ var consistent = true
+ subject.foreach { element =>
+ consistent &&= it.hasNext && element == it.next
+ }
+ consistent
+ }
+
+ property("worst-case tree height is iterable") = forAll(choose(0, 10), arbitrary[Boolean]) { (n: Int, even: Boolean) =>
+ /*
+ * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
+ * you can construct a skinny tree of height 2n by inserting the elements [1 .. 2^(n+1) - 2] and a tree of height
+ * 2n+1 by inserting the elements [1 .. 3 * 2^n - 2], both in reverse order.
+ *
+ * Since we allocate a fixed size buffer in the iterator (based on the tree size) we need to ensure
+ * it is big enough for these worst-case trees.
+ */
+ val highest = if (even) (1 << (n+1)) - 2 else 3*(1 << n) - 2
+ val values = (1 to highest).reverse
+ val subject = TreeSet(values: _*)
+ val it = subject.iterator
+ try { while (it.hasNext) it.next; true } catch { case _ => false }
+ }
+
+ property("sorted") = forAll { (subject: TreeSet[Int]) => (subject.size >= 3) ==> {
+ subject.zip(subject.tail).forall { case (x, y) => x < y }
+ }}
+
+ property("contains all") = forAll { (elements: List[Int]) =>
+ val subject = TreeSet(elements: _*)
+ elements.forall(subject.contains)
+ }
+
+ property("size") = forAll { (elements: List[Int]) =>
+ val subject = TreeSet(elements: _*)
+ elements.distinct.size == subject.size
+ }
+
+ property("toSeq") = forAll { (elements: List[Int]) =>
+ val subject = TreeSet(elements: _*)
+ elements.distinct.sorted == subject.toSeq
+ }
+
+ property("head") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeSet(elements: _*)
+ elements.min == subject.head
+ }}
+
+ property("last") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeSet(elements: _*)
+ elements.max == subject.last
+ }}
+
+ property("head/tail identity") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ subject == (subject.tail + subject.head)
+ }}
+
+ property("init/last identity") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ subject == (subject.init + subject.last)
+ }}
+
+ property("take") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(0, subject.size).sample.get
+ n == subject.take(n).size && subject.take(n).forall(subject.contains)
+ }
+
+ property("drop") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(0, subject.size).sample.get
+ (subject.size - n) == subject.drop(n).size && subject.drop(n).forall(subject.contains)
+ }
+
+ property("take/drop identity") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ subject == subject.take(n) ++ subject.drop(n)
+ }
+
+ property("splitAt") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ val (prefix, suffix) = subject.splitAt(n)
+ prefix == subject.take(n) && suffix == subject.drop(n)
+ }
+
+ def genSliceParms = for {
+ tree <- genTreeSet[Int]
+ from <- choose(0, tree.size)
+ until <- choose(from, tree.size)
+ } yield (tree, from, until)
+
+ property("slice") = forAll(genSliceParms) { case (subject, from, until) =>
+ val slice = subject.slice(from, until)
+ slice.size == until - from && subject.toSeq == subject.take(from).toSeq ++ slice ++ subject.drop(until)
+ }
+
+ property("takeWhile") = forAll { (subject: TreeSet[Int]) =>
+ val result = subject.takeWhile(_ < 0)
+ result.forall(_ < 0) && result == subject.take(result.size)
+ }
+
+ property("dropWhile") = forAll { (subject: TreeSet[Int]) =>
+ val result = subject.dropWhile(_ < 0)
+ result.forall(_ >= 0) && result == subject.takeRight(result.size)
+ }
+
+ property("span identity") = forAll { (subject: TreeSet[Int]) =>
+ val (prefix, suffix) = subject.span(_ < 0)
+ prefix.forall(_ < 0) && suffix.forall(_ >= 0) && subject == prefix ++ suffix
+ }
+
+ property("from is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val from = subject.drop(n).firstKey
+ subject.from(from).firstKey == from && subject.from(from).forall(_ >= from)
+ }}
+
+ property("to is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val to = subject.drop(n).firstKey
+ subject.to(to).lastKey == to && subject.to(to).forall(_ <= to)
+ }}
+
+ property("until is exclusive") = forAll { (subject: TreeSet[Int]) => subject.size > 1 ==> {
+ val n = choose(1, subject.size - 1).sample.get
+ val until = subject.drop(n).firstKey
+ subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_ <= until)
+ }}
+
+ property("remove single") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ val element = oneOf(subject.toSeq).sample.get
+ val removed = subject - element
+ subject.contains(element) && !removed.contains(element) && subject.size - 1 == removed.size
+ }}
+
+ property("remove all") = forAll { (subject: TreeSet[Int]) =>
+ val result = subject.foldLeft(subject)((acc, elt) => acc - elt)
+ result.isEmpty
+ }
+}
diff --git a/test/files/specialized/SI-5005.check b/test/files/specialized/SI-5005.check
new file mode 100644
index 0000000000..d2a97512ae
--- /dev/null
+++ b/test/files/specialized/SI-5005.check
@@ -0,0 +1,33 @@
+[[syntax trees at end of specialize]]// Scala source: newSource1
+package <empty> {
+ class C2[@specialized(scala.Boolean) U >: Nothing <: Any] extends Object with ScalaObject {
+ def <init>(): C2[U] = {
+ C2.super.<init>();
+ ()
+ };
+ def apply(x: U): U = x;
+ <specialized> def apply$mcZ$sp(x: Boolean): Boolean = C2.this.apply(x.asInstanceOf[U]()).asInstanceOf[Boolean]()
+ };
+ class B extends Object with ScalaObject {
+ def <init>(): B = {
+ B.super.<init>();
+ ()
+ };
+ new C2$mcZ$sp().apply$mcZ$sp(true)
+ };
+ <specialized> class C2$mcZ$sp extends C2[Boolean] {
+ <specialized> def <init>(): C2$mcZ$sp = {
+ C2$mcZ$sp.super.<init>();
+ ()
+ };
+ @inline final override <specialized> def apply(x: Boolean): Boolean = C2$mcZ$sp.this.apply$mcZ$sp(x);
+ @inline final override <specialized> def apply$mcZ$sp(x: Boolean): Boolean = x
+ }
+}
+
+[log inliner] Analyzing C2.apply count 0 with 1 blocks
+[log inliner] C2.apply blocks before inlining: 1 (2) after: 1 (2)
+[log inliner] Analyzing C2.apply$mcZ$sp count 0 with 1 blocks
+[log inliner] C2.apply$mcZ$sp blocks before inlining: 1 (8) after: 1 (8)
+[log inliner] Not inlining into apply because it is marked @inline.
+[log inliner] Not inlining into apply$mcZ$sp because it is marked @inline.
diff --git a/test/files/specialized/SI-5005.scala b/test/files/specialized/SI-5005.scala
new file mode 100644
index 0000000000..3d1ada49e2
--- /dev/null
+++ b/test/files/specialized/SI-5005.scala
@@ -0,0 +1,27 @@
+import scala.tools.partest._
+import java.io._
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:spec -optimize -Ylog:inliner -d " + testOutput.path
+
+ override def code = """
+ class C2[@specialized(Boolean) U]() {
+ @inline final def apply(x: U): U = x
+ }
+
+ class B {
+ (new C2[Boolean]())(true)
+ }
+ """
+
+ override def show(): Unit = {
+ // redirect err to out, for inliner log
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def isDebug = false // so we don't get the newSettings warning
+}
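The accompanying check file shows what -Xprint:spec exposes: specialization synthesizes a C2$mcZ$sp subclass plus an apply$mcZ$sp forwarder so the Boolean path never boxes. The pattern in miniature, as a sketch rather than the compiler's exact output:

    class Id[@specialized(Boolean) T] {
      def apply(x: T): T = x
    }
    // (new Id[Boolean])(true) dispatches to the generated Id$mcZ$sp and its
    // apply$mcZ$sp(x: Boolean): Boolean, so no java.lang.Boolean is allocated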
diff --git a/test/files/specialized/arrays-traits.check b/test/files/specialized/arrays-traits.check
index 92af4f13e1..40687a757e 100644
--- a/test/files/specialized/arrays-traits.check
+++ b/test/files/specialized/arrays-traits.check
@@ -1,6 +1,6 @@
-0
-0
-0
1
2
-1 \ No newline at end of file
+1
+3
+4
+2
diff --git a/test/files/specialized/arrays-traits.scala b/test/files/specialized/arrays-traits.scala
index de54d22d18..34a1c37a01 100644
--- a/test/files/specialized/arrays-traits.scala
+++ b/test/files/specialized/arrays-traits.scala
@@ -1,20 +1,12 @@
-
-
-
import runtime.ScalaRunTime._
-
-
trait SuperS[@specialized(AnyRef) T] {
def arr: Array[T]
def foo() = arr(0)
def bar(b: Array[T]) = b(0) = arr(0)
}
-
-class BaseS[@specialized(AnyRef) T](val arr: Array[T]) extends SuperS[T] {
-}
-
+class BaseS[@specialized(AnyRef) T](val arr: Array[T]) extends SuperS[T] { }
trait SuperG[T] {
def arr: Array[T]
@@ -22,13 +14,9 @@ trait SuperG[T] {
def bar(b: Array[T]) = b(0) = arr(0)
}
-
-class BaseG[T](val arr: Array[T]) extends SuperG[T] {
-}
-
+class BaseG[T](val arr: Array[T]) extends SuperG[T] { }
object Test {
-
def main(args: Array[String]) {
(new BaseS(new Array[String](1)): SuperS[String]).foo
println(arrayApplyCount)
@@ -42,5 +30,4 @@ object Test {
println(arrayApplyCount)
println(arrayUpdateCount)
}
-
}
diff --git a/test/files/specialized/arrays.check b/test/files/specialized/arrays.check
index d37dfb720d..8df790f413 100644
--- a/test/files/specialized/arrays.check
+++ b/test/files/specialized/arrays.check
@@ -1,4 +1,4 @@
-0
-0
50
-51 \ No newline at end of file
+51
+101
+102
diff --git a/test/instrumented/boxes.patch b/test/instrumented/boxes.patch
new file mode 100644
index 0000000000..11c5b37aa8
--- /dev/null
+++ b/test/instrumented/boxes.patch
@@ -0,0 +1,29 @@
+9a10,11
+> /* INSTRUMENTED VERSION */
+>
+50a53,61
+> public static int booleanBoxCount = 0;
+> public static int characterBoxCount = 0;
+> public static int byteBoxCount = 0;
+> public static int shortBoxCount = 0;
+> public static int integerBoxCount = 0;
+> public static int longBoxCount = 0;
+> public static int floatBoxCount = 0;
+> public static int doubleBoxCount = 0;
+>
+51a63
+> booleanBoxCount++;
+55a68
+> characterBoxCount++;
+59a73
+> byteBoxCount++;
+63a78
+> shortBoxCount++;
+67a83
+> integerBoxCount++;
+71a88
+> longBoxCount++;
+75a93
+> floatBoxCount++;
+79a98
+> doubleBoxCount++;
diff --git a/test/instrumented/library/scala/runtime/BoxesRunTime.java b/test/instrumented/library/scala/runtime/BoxesRunTime.java
index 797e9f89dd..f06f86f2f2 100644
--- a/test/instrumented/library/scala/runtime/BoxesRunTime.java
+++ b/test/instrumented/library/scala/runtime/BoxesRunTime.java
@@ -30,9 +30,9 @@ import scala.math.ScalaNumber;
* @contributor Stepan Koltsov
* @version 2.0 */
public final class BoxesRunTime
-{
+{
private static final int CHAR = 0, BYTE = 1, SHORT = 2, INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7;
-
+
private static int typeCode(Object a) {
if (a instanceof java.lang.Integer) return INT;
if (a instanceof java.lang.Byte) return BYTE;
@@ -43,13 +43,13 @@ public final class BoxesRunTime
if (a instanceof java.lang.Float) return FLOAT;
return OTHER;
}
-
+
private static String boxDescription(Object a) {
return "" + a.getClass().getSimpleName() + "(" + a + ")";
}
-
+
/* BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING */
-
+
public static int booleanBoxCount = 0;
public static int characterBoxCount = 0;
public static int byteBoxCount = 0;
@@ -63,58 +63,58 @@ public final class BoxesRunTime
booleanBoxCount++;
return java.lang.Boolean.valueOf(b);
}
-
+
public static java.lang.Character boxToCharacter(char c) {
characterBoxCount++;
return java.lang.Character.valueOf(c);
}
-
+
public static java.lang.Byte boxToByte(byte b) {
byteBoxCount++;
return java.lang.Byte.valueOf(b);
}
-
+
public static java.lang.Short boxToShort(short s) {
shortBoxCount++;
return java.lang.Short.valueOf(s);
}
-
+
public static java.lang.Integer boxToInteger(int i) {
integerBoxCount++;
return java.lang.Integer.valueOf(i);
}
-
+
public static java.lang.Long boxToLong(long l) {
longBoxCount++;
return java.lang.Long.valueOf(l);
}
-
+
public static java.lang.Float boxToFloat(float f) {
floatBoxCount++;
return java.lang.Float.valueOf(f);
}
-
+
public static java.lang.Double boxToDouble(double d) {
doubleBoxCount++;
// System.out.println("box " + d);
// (new Throwable()).printStackTrace();
return java.lang.Double.valueOf(d);
}
-
+
/* UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING */
-
+
public static boolean unboxToBoolean(Object b) {
return b == null ? false : ((java.lang.Boolean)b).booleanValue();
}
-
+
public static char unboxToChar(Object c) {
return c == null ? 0 : ((java.lang.Character)c).charValue();
}
-
+
public static byte unboxToByte(Object b) {
return b == null ? 0 : ((java.lang.Byte)b).byteValue();
}
-
+
public static short unboxToShort(Object s) {
return s == null ? 0 : ((java.lang.Short)s).shortValue();
}
@@ -122,22 +122,22 @@ public final class BoxesRunTime
public static int unboxToInt(Object i) {
return i == null ? 0 : ((java.lang.Integer)i).intValue();
}
-
+
public static long unboxToLong(Object l) {
return l == null ? 0 : ((java.lang.Long)l).longValue();
}
-
+
public static float unboxToFloat(Object f) {
return f == null ? 0.0f : ((java.lang.Float)f).floatValue();
}
-
+
public static double unboxToDouble(Object d) {
// System.out.println("unbox " + d);
return d == null ? 0.0d : ((java.lang.Double)d).doubleValue();
}
/* COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON */
-
+
private static int eqTypeCode(Number a) {
if ((a instanceof java.lang.Integer) || (a instanceof java.lang.Byte)) return INT;
if (a instanceof java.lang.Long) return LONG;
@@ -146,8 +146,8 @@ public final class BoxesRunTime
if (a instanceof java.lang.Float) return FLOAT;
return OTHER;
}
-
- public static boolean equals(Object x, Object y) {
+
+ public static boolean equals(Object x, Object y) {
if (x == y) return true;
return equals2(x, y);
}
@@ -162,10 +162,10 @@ public final class BoxesRunTime
return equalsCharObject((java.lang.Character)x, y);
if (x == null)
return y == null;
-
+
return x.equals(y);
}
-
+
public static boolean equalsNumObject(java.lang.Number xn, Object y) {
if (y instanceof java.lang.Number)
return equalsNumNum(xn, (java.lang.Number)y);
@@ -173,10 +173,10 @@ public final class BoxesRunTime
return equalsNumChar(xn, (java.lang.Character)y);
if (xn == null)
return y == null;
-
+
return xn.equals(y);
}
-
+
public static boolean equalsNumNum(java.lang.Number xn, java.lang.Number yn) {
int xcode = eqTypeCode(xn);
int ycode = eqTypeCode(yn);
@@ -195,10 +195,10 @@ public final class BoxesRunTime
}
if (xn == null)
return yn == null;
-
+
return xn.equals(yn);
}
-
+
public static boolean equalsCharObject(java.lang.Character xc, Object y) {
if (y instanceof java.lang.Character)
return xc.charValue() == ((java.lang.Character)y).charValue();
@@ -206,7 +206,7 @@ public final class BoxesRunTime
return equalsNumChar((java.lang.Number)y, xc);
if (xc == null)
return y == null;
-
+
return xc.equals(y);
}
@@ -224,11 +224,11 @@ public final class BoxesRunTime
default:
if (xn == null)
return yc == null;
-
+
return xn.equals(yc);
}
}
-
+
/** Hashcode algorithm is driven by the requirements imposed
* by primitive equality semantics, namely that equal objects
* have equal hashCodes. The first priority are the integral/char
@@ -262,16 +262,16 @@ public final class BoxesRunTime
else return n.hashCode();
}
public static int hashFromDouble(java.lang.Double n) {
- int iv = n.intValue();
+ int iv = n.intValue();
double dv = n.doubleValue();
if (iv == dv) return iv;
-
+
long lv = n.longValue();
if (lv == dv) return java.lang.Long.valueOf(lv).hashCode();
else return n.hashCode();
}
public static int hashFromFloat(java.lang.Float n) {
- int iv = n.intValue();
+ int iv = n.intValue();
float fv = n.floatValue();
if (iv == fv) return iv;
@@ -289,9 +289,9 @@ public final class BoxesRunTime
if (a instanceof Number) return hashFromNumber((Number)a);
else return a.hashCode();
}
-
+
/* OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS */
-
+
/** arg1 + arg2 */
public static Object add(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
@@ -518,7 +518,7 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
/** -arg */
public static Object negate(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
@@ -540,7 +540,7 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
/** +arg */
public static Object positive(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
@@ -650,7 +650,7 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
/** ~arg */
public static Object complement(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
@@ -664,7 +664,7 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
/** !arg */
public static Object takeNot(Object arg) throws NoSuchMethodException {
if (arg instanceof Boolean) {
@@ -672,15 +672,15 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
public static Object testEqual(Object arg1, Object arg2) throws NoSuchMethodException {
return boxToBoolean(arg1 == arg2);
}
-
+
public static Object testNotEqual(Object arg1, Object arg2) throws NoSuchMethodException {
return boxToBoolean(arg1 != arg2);
}
-
+
public static Object testLessThan(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
@@ -707,7 +707,7 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
public static Object testLessOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
@@ -734,7 +734,7 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
public static Object testGreaterOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
@@ -761,7 +761,7 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
public static Object testGreaterThan(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
@@ -788,7 +788,25 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
+ public static boolean isBoxedNumberOrBoolean(Object arg) {
+ if (arg instanceof java.lang.Boolean)
+ return true;
+ else
+ return isBoxedNumber(arg);
+ }
+ public static boolean isBoxedNumber(Object arg) {
+ return (
+ (arg instanceof java.lang.Integer)
+ || (arg instanceof java.lang.Long)
+ || (arg instanceof java.lang.Double)
+ || (arg instanceof java.lang.Float)
+ || (arg instanceof java.lang.Short)
+ || (arg instanceof java.lang.Character)
+ || (arg instanceof java.lang.Byte)
+ );
+ }
+
/** arg.toChar */
public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException {
if (arg instanceof java.lang.Integer) return boxToCharacter((char)unboxToInt(arg));
@@ -872,5 +890,5 @@ public final class BoxesRunTime
if (arg instanceof java.lang.Short) return boxToDouble((double)unboxToShort(arg));
throw new NoSuchMethodException();
}
-
+
}
diff --git a/test/instrumented/library/scala/runtime/ScalaRunTime.scala b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
index a8a74dd8ab..9eb93a418d 100644
--- a/test/instrumented/library/scala/runtime/ScalaRunTime.scala
+++ b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
@@ -6,70 +6,102 @@
** |/ **
\* */
-
+package scala.runtime
/* INSTRUMENTED VERSION */
-package scala.runtime
-
-import scala.reflect.ClassManifest
-import scala.collection.{ Seq, IndexedSeq, TraversableView }
+import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
-import scala.collection.immutable.{ NumericRange, List, Stream, Nil, :: }
+import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
import scala.collection.generic.{ Sorted }
-import scala.xml.{ Node, MetaData }
import scala.util.control.ControlThrowable
+import scala.xml.{ Node, MetaData }
+
+import java.lang.Double.doubleToLongBits
import java.lang.reflect.{ Modifier, Method => JMethod }
-/* The object <code>ScalaRunTime</code> provides ...
+/** The object ScalaRunTime provides support methods required by
+ * the scala runtime. All these methods should be considered
+ * outside the API and subject to change or removal without notice.
*/
object ScalaRunTime {
def isArray(x: AnyRef): Boolean = isArray(x, 1)
- def isArray(x: Any, atLevel: Int): Boolean =
- x != null && isArrayClass(x.asInstanceOf[AnyRef].getClass, atLevel)
+ def isArray(x: Any, atLevel: Int): Boolean =
+ x != null && isArrayClass(x.getClass, atLevel)
private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean =
clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
- def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
-
+ def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
var arrayApplyCount = 0
var arrayUpdateCount = 0
-
+
+ def isTuple(x: Any) = tupleNames(x.getClass.getName)
+ def isAnyVal(x: Any) = x match {
+ case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
+ case _ => false
+ }
+ // Avoiding boxing which messes up the specialized tests. Don't ask.
+ private val tupleNames = {
+ var i = 22
+ var names: List[String] = Nil
+ while (i >= 1) {
+ names ::= ("scala.Tuple" + String.valueOf(i))
+ i -= 1
+ }
+ names.toSet
+ }
+
+ /** Return the class object representing an unboxed value type,
+ * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
+ * rewrites expressions like 5.getClass to come here.
+ */
+ def anyValClass[T <: AnyVal](value: T): Class[T] = (value match {
+ case x: Byte => java.lang.Byte.TYPE
+ case x: Short => java.lang.Short.TYPE
+ case x: Char => java.lang.Character.TYPE
+ case x: Int => java.lang.Integer.TYPE
+ case x: Long => java.lang.Long.TYPE
+ case x: Float => java.lang.Float.TYPE
+ case x: Double => java.lang.Double.TYPE
+ case x: Boolean => java.lang.Boolean.TYPE
+ case x: Unit => java.lang.Void.TYPE
+ }).asInstanceOf[Class[T]]
+
/** Retrieve generic array element */
def array_apply(xs: AnyRef, idx: Int): Any = {
arrayApplyCount += 1
xs match {
- case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
- case x: Array[Int] => x(idx).asInstanceOf[Any]
- case x: Array[Double] => x(idx).asInstanceOf[Any]
- case x: Array[Long] => x(idx).asInstanceOf[Any]
- case x: Array[Float] => x(idx).asInstanceOf[Any]
- case x: Array[Char] => x(idx).asInstanceOf[Any]
- case x: Array[Byte] => x(idx).asInstanceOf[Any]
- case x: Array[Short] => x(idx).asInstanceOf[Any]
- case x: Array[Boolean] => x(idx).asInstanceOf[Any]
- case x: Array[Unit] => x(idx).asInstanceOf[Any]
- case null => throw new NullPointerException
- }
+ case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
+ case x: Array[Int] => x(idx).asInstanceOf[Any]
+ case x: Array[Double] => x(idx).asInstanceOf[Any]
+ case x: Array[Long] => x(idx).asInstanceOf[Any]
+ case x: Array[Float] => x(idx).asInstanceOf[Any]
+ case x: Array[Char] => x(idx).asInstanceOf[Any]
+ case x: Array[Byte] => x(idx).asInstanceOf[Any]
+ case x: Array[Short] => x(idx).asInstanceOf[Any]
+ case x: Array[Boolean] => x(idx).asInstanceOf[Any]
+ case x: Array[Unit] => x(idx).asInstanceOf[Any]
+ case null => throw new NullPointerException
+ }
}
/** update generic array element */
def array_update(xs: AnyRef, idx: Int, value: Any): Unit = {
arrayUpdateCount += 1
xs match {
- case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef]
- case x: Array[Int] => x(idx) = value.asInstanceOf[Int]
- case x: Array[Double] => x(idx) = value.asInstanceOf[Double]
- case x: Array[Long] => x(idx) = value.asInstanceOf[Long]
- case x: Array[Float] => x(idx) = value.asInstanceOf[Float]
- case x: Array[Char] => x(idx) = value.asInstanceOf[Char]
- case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte]
- case x: Array[Short] => x(idx) = value.asInstanceOf[Short]
- case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean]
- case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit]
- case null => throw new NullPointerException
- }
+ case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef]
+ case x: Array[Int] => x(idx) = value.asInstanceOf[Int]
+ case x: Array[Double] => x(idx) = value.asInstanceOf[Double]
+ case x: Array[Long] => x(idx) = value.asInstanceOf[Long]
+ case x: Array[Float] => x(idx) = value.asInstanceOf[Float]
+ case x: Array[Char] => x(idx) = value.asInstanceOf[Char]
+ case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte]
+ case x: Array[Short] => x(idx) = value.asInstanceOf[Short]
+ case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean]
+ case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit]
+ case null => throw new NullPointerException
+ }
}
/** Get generic array length */
@@ -85,7 +117,7 @@ object ScalaRunTime {
case x: Array[Boolean] => x.length
case x: Array[Unit] => x.length
case null => throw new NullPointerException
- }
+ }
def array_clone(xs: AnyRef): AnyRef = xs match {
case x: Array[AnyRef] => ArrayRuntime.cloneArray(x)
@@ -122,7 +154,7 @@ object ScalaRunTime {
}
arr
}
-
+
// Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957
// More background at ticket #2318.
def ensureAccessible(m: JMethod): JMethod = {
@@ -130,10 +162,10 @@ object ScalaRunTime {
try m setAccessible true
catch { case _: SecurityException => () }
}
- m
+ m
}
- def checkInitialized[T <: AnyRef](x: T): T =
+ def checkInitialized[T <: AnyRef](x: T): T =
if (x == null) throw new UninitializedError else x
abstract class Try[+A] {
@@ -143,9 +175,9 @@ object ScalaRunTime {
def Try[A](block: => A): Try[A] = new Try[A] with Runnable {
private var result: A = _
- private var exception: Throwable =
+ private var exception: Throwable =
try { run() ; null }
- catch {
+ catch {
case e: ControlThrowable => throw e // don't catch non-local returns etc
case e: Throwable => e
}
@@ -167,27 +199,26 @@ object ScalaRunTime {
def _toString(x: Product): String =
x.productIterator.mkString(x.productPrefix + "(", ",", ")")
-
- def _hashCode(x: Product): Int = {
- import scala.util.MurmurHash._
- val arr = x.productArity
- var h = startHash(arr)
- var c = startMagicA
- var k = startMagicB
- var i = 0
- while (i < arr) {
- val elem = x.productElement(i)
- h = extendHash(h, if (elem == null) 0 else elem.##, c, k)
- c = nextMagicA(c)
- k = nextMagicB(k)
- i += 1
+
+ def _hashCode(x: Product): Int = scala.util.MurmurHash3.productHash(x)
+
+ /** A helper for case classes. */
+ def typedProductIterator[T](x: Product): Iterator[T] = {
+ new AbstractIterator[T] {
+ private var c: Int = 0
+ private val cmax = x.productArity
+ def hasNext = c < cmax
+ def next() = {
+ val result = x.productElement(c)
+ c += 1
+ result.asInstanceOf[T]
+ }
}
- finalizeHash(h)
}
/** Fast path equality method for inlining; used when -optimise is set.
*/
- @inline def inlinedEquals(x: Object, y: Object): Boolean =
+ @inline def inlinedEquals(x: Object, y: Object): Boolean =
if (x eq y) true
else if (x eq null) false
else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.equalsNumObject(x.asInstanceOf[java.lang.Number], y)
@@ -198,20 +229,21 @@ object ScalaRunTime {
case y: Product if x.productArity == y.productArity => x.productIterator sameElements y.productIterator
case _ => false
}
-
+
// hashcode -----------------------------------------------------------
//
// Note that these are the implementations called by ##, so they
// must not call ## themselves.
-
+
@inline def hash(x: Any): Int =
- if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
+ if (x == null) 0
+ else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
else x.hashCode
-
+
@inline def hash(dv: Double): Int = {
val iv = dv.toInt
if (iv == dv) return iv
-
+
val lv = dv.toLong
if (lv == dv) return lv.hashCode
@@ -221,31 +253,27 @@ object ScalaRunTime {
@inline def hash(fv: Float): Int = {
val iv = fv.toInt
if (iv == fv) return iv
-
+
val lv = fv.toLong
- if (lv == fv) return lv.hashCode
+ if (lv == fv) return hash(lv)
else fv.hashCode
}
@inline def hash(lv: Long): Int = {
- val iv = lv.toInt
- if (iv == lv) iv else lv.hashCode
+ val low = lv.toInt
+ val lowSign = low >>> 31
+ val high = (lv >>> 32).toInt
+ low ^ (high + lowSign)
}
+ @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
+
+ // The remaining overloads are here for completeness, but the compiler
+ // inlines these definitions directly so they're not generally used.
@inline def hash(x: Int): Int = x
@inline def hash(x: Short): Int = x.toInt
@inline def hash(x: Byte): Int = x.toInt
@inline def hash(x: Char): Int = x.toInt
- @inline def hash(x: Boolean): Int = x.hashCode
+ @inline def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
@inline def hash(x: Unit): Int = 0
-
- @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
-
- /** XXX Why is there one boxed implementation in here? It would seem
- * we should have all the numbers or none of them.
- */
- @inline def hash(x: java.lang.Long): Int = {
- val iv = x.intValue
- if (iv == x.longValue) iv else x.hashCode
- }
/** A helper method for constructing case class equality methods,
* because existential types get in the way of a clean outcome and
@@ -263,17 +291,13 @@ object ScalaRunTime {
* called on null and (b) depending on the apparent type of an
* array, toString may or may not print it in a human-readable form.
*
- * @param arg the value to stringify
- * @return a string representation of <code>arg</code>
- *
- */
+ * @param arg the value to stringify
+ * @return a string representation of arg.
+ */
def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue)
- def stringOf(arg: Any, maxElements: Int): String = {
+ def stringOf(arg: Any, maxElements: Int): String = {
def isScalaClass(x: AnyRef) =
Option(x.getClass.getPackage) exists (_.getName startsWith "scala.")
-
- def isTuple(x: AnyRef) =
- x.getClass.getName matches """^scala\.Tuple(\d+).*"""
// When doing our own iteration is dangerous
def useOwnToString(x: Any) = x match {
@@ -283,8 +307,8 @@ object ScalaRunTime {
case _: Range | _: NumericRange[_] => true
// Sorted collections do the wrong thing (for us) on iteration - ticket #3493
case _: Sorted[_, _] => true
- // StringBuilder(a, b, c) is not so attractive
- case _: StringBuilder => true
+ // StringBuilder(a, b, c) and similar not so attractive
+ case _: StringLike[_] => true
// Don't want to evaluate any elements in a view
case _: TraversableView[_, _] => true
// Don't want to a) traverse infinity or b) be overly helpful with peoples' custom
@@ -299,14 +323,27 @@ object ScalaRunTime {
case (k, v) => inner(k) + " -> " + inner(v)
case _ => inner(arg)
}
- // The recursively applied attempt to prettify Array printing
+
+ // Special casing Unit arrays, the value class which uses a reference array type.
+ def arrayToString(x: AnyRef) = {
+ if (x.getClass.getComponentType == classOf[BoxedUnit])
+ 0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")")
+ else
+ WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")")
+ }
+
+ // The recursively applied attempt to prettify Array printing.
+ // Note that iterator is used if possible and foreach only as a
+ // last resort, because the parallel collections' "foreach" runs
+ // in a random order even on sequences.
def inner(arg: Any): String = arg match {
case null => "null"
case "" => "\"\""
case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x
- case x if useOwnToString(x) => x.toString
- case x: AnyRef if isArray(x) => WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")")
- case x: collection.Map[_, _] => x take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
+ case x if useOwnToString(x) => x toString
+ case x: AnyRef if isArray(x) => arrayToString(x)
+ case x: collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
+ case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")")
@@ -315,13 +352,31 @@ object ScalaRunTime {
// The try/catch is defense against iterables which aren't actually designed
// to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes.
- val s =
- try inner(arg)
- catch {
- case _: StackOverflowError | _: UnsupportedOperationException => arg.toString
- }
-
+ try inner(arg)
+ catch {
+ case _: StackOverflowError | _: UnsupportedOperationException | _: AssertionError => "" + arg
+ }
+ }
+
+ /** stringOf formatted for use in a repl result. */
+ def replStringOf(arg: Any, maxElements: Int): String = {
+ val s = stringOf(arg, maxElements)
val nl = if (s contains "\n") "\n" else ""
- nl + s + "\n"
+
+ nl + s + "\n"
+ }
+ private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) {
+ if (sys.props contains "scala.debug.zip") {
+ val xs = coll1.toIndexedSeq
+ val ys = coll2.toIndexedSeq
+ if (xs.length != ys.length) {
+ Console.err.println(
+ "Mismatched zip in " + what + ":\n" +
+ " this: " + xs.mkString(", ") + "\n" +
+ " that: " + ys.mkString(", ")
+ )
+ (new Exception).getStackTrace.drop(2).take(10).foreach(println)
+ }
+ }
}
}
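
An aside on the ScalaRunTime changes above: stringOf now renders at most maxElements elements per collection, special-cases Array[Unit] (whose backing array is of BoxedUnit), and defers to a collection's own toString wherever iterating would be unsafe (views, ranges, sorted maps, infinite streams); replStringOf wraps the result in the REPL's newline conventions, and checkZip reports mismatched zips only when the JVM runs with -Dscala.debug.zip. Below is a minimal sketch of the intended behavior, assuming a library build that contains this patch; expected outputs are noted as comments, not verified here.

import scala.runtime.ScalaRunTime.{ stringOf, replStringOf }

object StringOfSketch extends App {
  // Truncation: only the first maxElements elements are rendered.
  println(stringOf(List(1, 2, 3, 4, 5), 3))   // List(1, 2, 3)

  // Unit arrays are backed by BoxedUnit[]; each slot prints as ().
  println(stringOf(Array((), ()), 10))        // Array((), ())

  // Views keep their own toString, so no elements are forced.
  println(stringOf((1 to 10).view, 3))

  // replStringOf puts a leading newline before multi-line results
  // and always appends a trailing one.
  print(replStringOf("a\nb", 10))             // "\na\nb\n"
}
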
diff --git a/test/instrumented/mkinstrumented b/test/instrumented/mkinstrumented
deleted file mode 100755
index a87e8cb94f..0000000000
--- a/test/instrumented/mkinstrumented
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-#
-# Used to compile a jar with instrumented versions of certain classes.
-#
-
-
-
-
-if [ $# -ne 1 ]
-then
- echo "Must provide build dir ('target' or 'build')."
- exit 1
-fi
-
-
-BUILDDIR=$1
-TOPDIR=../..
-SCALAC=$TOPDIR/$BUILDDIR/pack/bin/scalac
-SRC_DIR=library/
-SCALALIB=$TOPDIR/$BUILDDIR/pack/lib/scala-library.jar
-CLASSDIR=classes/
-ARTIFACT=instrumented.jar
-
-
-# compile it
-rm -rf $CLASSDIR
-mkdir $CLASSDIR
-JSOURCES=`find $SRC_DIR -name "*.java" -print`
-SOURCES=`find $SRC_DIR \( -name "*.scala" -o -name "*.java" \) -print`
-echo $SOURCES
-$SCALAC -d $CLASSDIR $SOURCES
-javac -cp $SCALALIB -d $CLASSDIR $JSOURCES
-
-
-# jar it up
-rm $ARTIFACT
-cd $CLASSDIR
-jar cf $ARTIFACT .
-mv $ARTIFACT ../
-cd ..
-
-
-
-
-
-
diff --git a/test/instrumented/mkinstrumented.sh b/test/instrumented/mkinstrumented.sh
new file mode 100755
index 0000000000..d734dd2e00
--- /dev/null
+++ b/test/instrumented/mkinstrumented.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+#
+# Used to compile a jar with instrumented versions of certain classes.
+#
+
+set -e
+
+run () {
+ echo "% $@"
+ "$@"
+}
+
+if [ $# -ne 1 ]
+then
+ echo "Must provide build dir ('target' or 'build')."
+ exit 1
+fi
+
+scriptDir=$(cd $(dirname $0) && pwd)
+
+TOPDIR="$scriptDir/../.."
+RUNTIME="$TOPDIR/src/library/scala/runtime"
+SOURCES="$RUNTIME/BoxesRunTime.java $RUNTIME/ScalaRunTime.scala"
+SCALAC=$TOPDIR/$1/pack/bin/scalac
+SRC_DIR="$scriptDir/library/scala/runtime"
+SCALALIB=$TOPDIR/$1/pack/lib/scala-library.jar
+CLASSDIR="$scriptDir/classes"
+ARTIFACT=instrumented.jar
+DESTINATION="$TOPDIR/test/files/speclib"
+
+[[ -x "$SCALAC" ]] || exit 1;
+
+# compile it
+run rm -rf $CLASSDIR && mkdir $CLASSDIR
+run cp $SOURCES $SRC_DIR
+( cd $SRC_DIR && run patch BoxesRunTime.java $scriptDir/boxes.patch && run patch ScalaRunTime.scala $scriptDir/srt.patch )
+
+ORIG=$(find $SRC_DIR -name '*.orig')
+[[ -z "$ORIG" ]] || rm -f $ORIG
+
+JSOURCES=$(find $SRC_DIR -name "*.java" -print)
+SOURCES=$(find $SRC_DIR -type f -print)
+# echo $SOURCES
+run $SCALAC -d $CLASSDIR $SOURCES
+run javac -cp $SCALALIB -d $CLASSDIR $JSOURCES
+
+# jar it up
+run cd $CLASSDIR
+run jar cf $ARTIFACT .
+run mv -f $ARTIFACT "$DESTINATION"
+echo "$(cd "$DESTINATION" && pwd)/$ARTIFACT has been created." \ No newline at end of file
diff --git a/test/instrumented/srt.patch b/test/instrumented/srt.patch
new file mode 100644
index 0000000000..2f472ff1c0
--- /dev/null
+++ b/test/instrumented/srt.patch
@@ -0,0 +1,23 @@
+9a10,11
+> /* INSTRUMENTED VERSION */
+>
+33a36,38
+> var arrayApplyCount = 0
+> var arrayUpdateCount = 0
+>
+35c40,42
+< def array_apply(xs: AnyRef, idx: Int): Any = xs match {
+---
+> def array_apply(xs: AnyRef, idx: Int): Any = {
+> arrayApplyCount += 1
+> xs match {
+47a55
+> }
+50c58,60
+< def array_update(xs: AnyRef, idx: Int, value: Any): Unit = xs match {
+---
+> def array_update(xs: AnyRef, idx: Int, value: Any): Unit = {
+> arrayUpdateCount += 1
+> xs match {
+62a73
+> }
diff --git a/test/pending/pos/bug4704.scala b/test/pending/pos/bug4704.scala
new file mode 100644
index 0000000000..6af719adf7
--- /dev/null
+++ b/test/pending/pos/bug4704.scala
@@ -0,0 +1,36 @@
+trait Bar {
+ def f1 = super.hashCode
+ def f2 = super[Object].hashCode
+ def f3 = super[ScalaObject].hashCode
+
+ override def hashCode = 1
+}
+trait Barzoo {
+ def g1 = super.hashCode
+ def g2 = super[Object].hashCode
+ def g3 = super[ScalaObject].hashCode
+
+ override def hashCode = 2
+}
+
+trait Foo extends Bar with Barzoo {
+ def f4 = super.hashCode
+ def f5 = super[Object].hashCode
+ def f6 = super[ScalaObject].hashCode
+ def f6b = super[Bar].hashCode
+ def g4 = super[Barzoo].hashCode
+
+ override def hashCode = super[Bar].hashCode + super[Barzoo].hashCode
+}
+
+class Quux extends Foo {
+ override def hashCode = super.hashCode + super[Object].hashCode + super[ScalaObject].hashCode + super[Foo].hashCode
+}
+
+trait Borp extends Quux {
+ def f12 = super.hashCode
+ def f14 = super[ScalaObject].hashCode
+ def f15 = super[Quux].hashCode
+ override def hashCode = super[Quux].hashCode
+}
+
diff --git a/test/pending/pos/inference.scala b/test/pending/pos/inference.scala
new file mode 100644
index 0000000000..d28d003435
--- /dev/null
+++ b/test/pending/pos/inference.scala
@@ -0,0 +1,39 @@
+// inference illuminator
+object Test {
+ class D1[T1 : Manifest, T2 <: T1 : Manifest](x: T1) { println(manifest[(T1, T2)]) }
+ class D2[T1 : Manifest, T2 >: T1 : Manifest](x: T1) { println(manifest[(T1, T2)]) }
+ class D3[+T1 : Manifest, T2 <: T1 : Manifest](x: T1) { println(manifest[(T1, T2)]) }
+ class D4[-T1 : Manifest, T2 >: T1 : Manifest](x: T1) { println(manifest[(T1, T2)]) }
+
+ class E1[T1 : Manifest, T2 <: T1 : Manifest](x: D1[T1, T2]) { println(manifest[(T1, T2)]) }
+ class E2[T1 : Manifest, T2 >: T1 : Manifest](x: D2[T1, T2]) { println(manifest[(T1, T2)]) }
+ class E3[+T1 : Manifest, T2 <: T1 : Manifest](x: D3[T1, T2]) { println(manifest[(T1, T2)]) }
+ class E4[-T1 : Manifest, T2 >: T1 : Manifest](x: D4[T1, T2]) { println(manifest[(T1, T2)]) }
+
+ def main(args: Array[String]): Unit = {
+ // WHY YOU NO LIKE NOTHING SO MUCH SCALAC?
+ val d1 = new D1(5)
+ val d2 = new D2(5)
+ val d3 = new D3(5)
+ val d4 = new D4(5)
+
+ new E1(d1) // fails
+ new E2(d2)
+ new E3(d3) // fails
+ new E4(d4)
+ }
+ // found : Test.D1[Int,Nothing]
+ // required: Test.D1[Int,T2]
+ // Note: Nothing <: T2, but class D1 is invariant in type T2.
+ // You may wish to define T2 as +T2 instead. (SLS 4.5)
+ // new E1(d1)
+ // ^
+ // test/pending/pos/inference.scala:22: error: type mismatch;
+ // found : Test.D3[Int,Nothing]
+ // required: Test.D3[Int,T2]
+ // Note: Nothing <: T2, but class D3 is invariant in type T2.
+ // You may wish to define T2 as +T2 instead. (SLS 4.5)
+ // new E3(d3)
+ // ^
+ // two errors found
+}
diff --git a/test/pending/pos/nothing.scala b/test/pending/pos/nothing.scala
new file mode 100644
index 0000000000..f76017fb16
--- /dev/null
+++ b/test/pending/pos/nothing.scala
@@ -0,0 +1,24 @@
+// More shoddy treatment for nothing.
+class A {
+ class Q3A[+T1, T2 <: T1](x: T1)
+ class Q3B[+T1, T2 <: T1](x: Q3A[T1, T2])
+
+ val x1 = new Q3B(new Q3A("a"))
+ val x2 = new Q3B(new Q3A[String, Nothing]("a"))
+ val x3 = new Q3B(new Q3A[String, Null]("a"))
+ // test/pending/pos/nothing.scala:5: error: type mismatch;
+ // found : A.this.Q3A[String,Nothing]
+ // required: A.this.Q3A[String,T2]
+ // Note: Nothing <: T2, but class Q3A is invariant in type T2.
+ // You may wish to define T2 as +T2 instead. (SLS 4.5)
+ // val x1 = new Q3B(new Q3A("a"))
+ // ^
+ // test/pending/pos/nothing.scala:6: error: type mismatch;
+ // found : A.this.Q3A[String,Nothing]
+ // required: A.this.Q3A[String,T2]
+ // Note: Nothing <: T2, but class Q3A is invariant in type T2.
+ // You may wish to define T2 as +T2 instead. (SLS 4.5)
+ // val x2 = new Q3B(new Q3A[String, Nothing]("a"))
+ // ^
+ // two errors found
+}
diff --git a/test/pending/pos/t1380/gnujaxp.jar.desired.sha1 b/test/pending/pos/t1380/gnujaxp.jar.desired.sha1
deleted file mode 100644
index c155c2aaa2..0000000000
--- a/test/pending/pos/t1380/gnujaxp.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ee000286d00c5209d5644462c1cfea87fc8b1342 ?gnujaxp.jar
diff --git a/test/pending/pos/t1380/hallo.scala b/test/pending/pos/t1380/hallo.scala
deleted file mode 100644
index bb8fff2333..0000000000
--- a/test/pending/pos/t1380/hallo.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object hallo {
- def main(args:Array[String]) = println("hallo")
-}
diff --git a/test/pending/pos/t4012.scala b/test/pending/pos/t4012.scala
new file mode 100644
index 0000000000..9b8a1b0dbe
--- /dev/null
+++ b/test/pending/pos/t4012.scala
@@ -0,0 +1,7 @@
+trait C1[+A] {
+ def head: A = sys.error("")
+}
+trait C2[@specialized +A] extends C1[A] {
+ override def head: A = super.head
+}
+class C3 extends C2[Char] \ No newline at end of file
diff --git a/test/pending/pos/t4123.scala b/test/pending/pos/t4123.scala
new file mode 100644
index 0000000000..82ab16b4e4
--- /dev/null
+++ b/test/pending/pos/t4123.scala
@@ -0,0 +1,14 @@
+// /scala/trac/4123/a.scala
+// Sun Feb 19 00:08:53 PST 2012
+
+trait Iter[@specialized(Byte) +A] extends Iterator[A] {
+ self =>
+
+ override def map[B](f: (A) => B) = super.map(f)
+}
+
+class ByteIter extends Iter[Byte] {
+ var i = 0
+ def hasNext = i < 3
+ def next = { i += 1 ; i.toByte }
+} \ No newline at end of file
diff --git a/test/pending/pos/t4436.scala b/test/pending/pos/t4436.scala
new file mode 100644
index 0000000000..acbf0beae6
--- /dev/null
+++ b/test/pending/pos/t4436.scala
@@ -0,0 +1,3 @@
+trait Chunk[@specialized +A] {
+ def bippy[@specialized B >: A](e: B): Chunk[B]
+} \ No newline at end of file
diff --git a/test/pending/pos/t4541.scala b/test/pending/pos/t4541.scala
new file mode 100644
index 0000000000..c6d9672cc5
--- /dev/null
+++ b/test/pending/pos/t4541.scala
@@ -0,0 +1,10 @@
+@SerialVersionUID(1L)
+final class SparseArray[@specialized T](private var data : Array[T]) extends Serializable {
+ def use(inData : Array[T]) = {
+ data = inData;
+ }
+
+ def set(that : SparseArray[T]) = {
+ use(that.data.clone)
+ }
+} \ No newline at end of file
diff --git a/test/pending/pos/t4717.scala b/test/pending/pos/t4717.scala
new file mode 100644
index 0000000000..43cf412bc6
--- /dev/null
+++ b/test/pending/pos/t4717.scala
@@ -0,0 +1,7 @@
+trait Bug1[@specialized +A] extends TraversableOnce[A] {
+ def ++[B >: A](that: TraversableOnce[B]): Iterator[B] = new Iterator[B] {
+ lazy val it = that.toIterator
+ def hasNext = it.hasNext
+ def next = it.next
+ }
+} \ No newline at end of file
diff --git a/test/pending/pos/t4786.scala b/test/pending/pos/t4786.scala
new file mode 100644
index 0000000000..f0579142b8
--- /dev/null
+++ b/test/pending/pos/t4786.scala
@@ -0,0 +1,24 @@
+trait Matrix[@specialized A, Repr[C] <: Matrix[C, Repr]] { // crash goes away if @specialize is removed
+ def duplicate(mb: MatrixBuilder[A, Repr]): Repr[A] = {
+ mb.zeros
+ }
+}
+trait DenseMatrix[@specialized A] extends Matrix[A, DenseMatrix]
+trait DenseMatrixFlt extends DenseMatrix[Float]
+
+trait MatrixBuilder[@specialized A, Repr[C] <: Matrix[C, Repr]] {
+ def zeros: Repr[A]
+}
+object DenseFloatBuilder extends MatrixBuilder[Float, DenseMatrix] {
+ val zeros = new Object with DenseMatrixFlt
+ // Note:
+ // - in 2.9 crash goes away if the explicit type "DenseMatrixFlt" is assigned to "zeros"
+ // - in 2.9 crash goes away if DenseMatrixFlt is a class instead of a trait:
+ // val zeros = new DenseMatrixFlt
+}
+
+object Test extends App {
+ val m1 = DenseFloatBuilder.zeros // in 2.9 crash goes away if explicit type "DenseMatrixFlt" is assigned to m1
+ val m2 = m1.duplicate(DenseFloatBuilder)
+}
+
diff --git a/test/pending/pos/t4787.scala b/test/pending/pos/t4787.scala
new file mode 100644
index 0000000000..cf3fe93c50
--- /dev/null
+++ b/test/pending/pos/t4787.scala
@@ -0,0 +1,4 @@
+trait MatrixImpl[@specialized A, @specialized B] {
+ def mapTo[ A2, B2, That <: MatrixImpl[A2, B2]](that: That)(f: A => A2) {
+ }
+}
diff --git a/test/pending/pos/t4790.scala b/test/pending/pos/t4790.scala
new file mode 100644
index 0000000000..e451fe80ab
--- /dev/null
+++ b/test/pending/pos/t4790.scala
@@ -0,0 +1,4 @@
+package spectest {
+ class Sp[@specialized A, B](val a: A, val b: B) { }
+ class Fsp[@specialized A, B](a: A, b: B) extends Sp(a,b) { def ab = (a,b) }
+}
diff --git a/test/pending/pos/t5259.scala b/test/pending/pos/t5259.scala
new file mode 100644
index 0000000000..317e28a9dc
--- /dev/null
+++ b/test/pending/pos/t5259.scala
@@ -0,0 +1,14 @@
+object DefaultArgBogusTypeMismatch {
+
+ class A[T]
+ class B {
+ type T = this.type
+ def m(implicit a : A[T] = new A[T]) = a
+ }
+
+ def newB = new B
+ val a1 = newB.m // Bogus type mismatch
+
+ val stableB = new B
+ val a2 = stableB.m // OK
+}
diff --git a/test/pending/pos/t5399.scala b/test/pending/pos/t5399.scala
new file mode 100644
index 0000000000..d8c1d5e51c
--- /dev/null
+++ b/test/pending/pos/t5399.scala
@@ -0,0 +1,15 @@
+class Test {
+ type AnyCyclic = Execute[Task]#CyclicException[_]
+
+ trait Task[T]
+
+ trait Execute[A[_] <: AnyRef] {
+ class CyclicException[T](val caller: A[T], val target: A[T])
+ }
+
+ def convertCyclic(c: AnyCyclic): String =
+ (c.caller, c.target) match {
+ case (caller: Task[_], target: Task[_]) => "bazinga!"
+ }
+}
+
diff --git a/test/pending/pos/t5400.scala b/test/pending/pos/t5400.scala
new file mode 100644
index 0000000000..cb4be4bde5
--- /dev/null
+++ b/test/pending/pos/t5400.scala
@@ -0,0 +1,14 @@
+trait TFn1B {
+ type In
+ type Out
+ type Apply[T <: In] <: Out
+}
+
+trait TFn1[I, O] extends TFn1B {
+ type In = I
+ type Out = O
+}
+
+trait >>[F1 <: TFn1[_, _], F2 <: TFn1[_, _]] extends TFn1[F1#In, F2#Out] {
+ type Apply[T] = F2#Apply[F1#Apply[T]]
+}
diff --git a/test/pending/pos/those-kinds-are-high.scala b/test/pending/pos/those-kinds-are-high.scala
index 3012e72d7e..434e64cefb 100644
--- a/test/pending/pos/those-kinds-are-high.scala
+++ b/test/pending/pos/those-kinds-are-high.scala
@@ -27,11 +27,27 @@ class A {
//
// List[Container[String] with Template[Container[Any] with Template[Container[Any] with Template[Any]]]
//
+ // *** Update 2/24/2012
+ //
+ // Hey, now there are polytypes in the inferred type.
+ // Not sure if that is progress or regress.
+ //
+ // test/pending/pos/those-kinds-are-high.scala:36: error: type mismatch;
+ // found : C1[String]
+ // required: ScalaObject with Container[String] with Template[ScalaObject with Container with Template[ScalaObject with Container with Template[[X]Container[X]]]]
+ // def fFail = List(new C1[String], new C2[String])
+ // ^
+ // test/pending/pos/those-kinds-are-high.scala:36: error: type mismatch;
+ // found : C2[String]
+ // required: ScalaObject with Container[String] with Template[ScalaObject with Container with Template[ScalaObject with Container with Template[[X]Container[X]]]]
+ // def fFail = List(new C1[String], new C2[String])
+ // ^
+ // two errors found
/** Working version explicitly typed.
*/
def fExplicit = List[Template[Container] with Container[String]](new C1[String], new C2[String])
// nope
- // def fFail = List(new C1[String], new C2[String])
+ def fFail = List(new C1[String], new C2[String])
}
diff --git a/test/pending/run/bug4704run.scala b/test/pending/run/bug4704run.scala
new file mode 100644
index 0000000000..af488a56c7
--- /dev/null
+++ b/test/pending/run/bug4704run.scala
@@ -0,0 +1,10 @@
+trait MM {
+ protected def method = "bip"
+}
+trait NN {
+ protected def method = "bop"
+}
+trait OOOOO extends MM with NN {
+ override protected def method = super[MM].method + super[NN].method
+ override def hashCode = super[MM].hashCode + super[NN].hashCode
+}
diff --git a/test/pending/run/macro-overload.check b/test/pending/run/macro-overload.check
new file mode 100644
index 0000000000..764f914e48
--- /dev/null
+++ b/test/pending/run/macro-overload.check
@@ -0,0 +1,4 @@
+object-Int
+object-String
+class-Int
+class-String \ No newline at end of file
diff --git a/test/pending/run/macro-overload.flags b/test/pending/run/macro-overload.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/pending/run/macro-overload.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/pending/run/macro-overload/Macros_1.scala b/test/pending/run/macro-overload/Macros_1.scala
new file mode 100644
index 0000000000..f24c69ea7b
--- /dev/null
+++ b/test/pending/run/macro-overload/Macros_1.scala
@@ -0,0 +1,9 @@
+object Macros {
+ def macro bar(x: Int): Int = Apply(Select(Select(Ident("scala"), newTermName("Predef")), newTermName("println")), List(Literal(Constant("object-Int"))))
+ def macro bar(x: String): String = Apply(Select(Select(Ident("scala"), newTermName("Predef")), newTermName("println")), List(Literal(Constant("object-String"))))
+}
+
+class Macros {
+ def macro bar(x: Int): Int = Apply(Select(Select(Ident("scala"), newTermName("Predef")), newTermName("println")), List(Literal(Constant("class-Int"))))
+ def macro bar(x: String): String = Apply(Select(Select(Ident("scala"), newTermName("Predef")), newTermName("println")), List(Literal(Constant("class-String"))))
+} \ No newline at end of file
diff --git a/test/pending/run/macro-overload/Test_2.scala b/test/pending/run/macro-overload/Test_2.scala
new file mode 100644
index 0000000000..75f6572e03
--- /dev/null
+++ b/test/pending/run/macro-overload/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ Macros.bar(2)
+ Macros.bar("2")
+ new Macros.bar(2)
+ new Macros.bar("2")
+} \ No newline at end of file
diff --git a/test/pending/run/reify_addressbook.scala b/test/pending/run/reify_addressbook.scala
index 225f26b75e..54dd5545bd 100644
--- a/test/pending/run/reify_addressbook.scala
+++ b/test/pending/run/reify_addressbook.scala
@@ -66,6 +66,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/reify_brainf_ck.scala b/test/pending/run/reify_brainf_ck.scala
index 3bfb76c9ea..0034644b81 100644
--- a/test/pending/run/reify_brainf_ck.scala
+++ b/test/pending/run/reify_brainf_ck.scala
@@ -80,6 +80,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/reify_callccinterpreter.scala b/test/pending/run/reify_callccinterpreter.scala
index c10f4f0b4e..96ae9c5c17 100644
--- a/test/pending/run/reify_callccinterpreter.scala
+++ b/test/pending/run/reify_callccinterpreter.scala
@@ -89,6 +89,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/reify_classfileann_b.check b/test/pending/run/reify_classfileann_b.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/pending/run/reify_classfileann_b.check
diff --git a/test/pending/run/reify_classfileann_b.scala b/test/pending/run/reify_classfileann_b.scala
new file mode 100644
index 0000000000..c31826377a
--- /dev/null
+++ b/test/pending/run/reify_classfileann_b.scala
@@ -0,0 +1,24 @@
+import scala.reflect._
+import scala.reflect.api._
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends ClassfileAnnotation
+
+object Test extends App {
+ // test 1: reify
+ val tree = scala.reflect.Code.lift{
+ class C {
+ def x: Int = {
+ 2: @ann(bar="1", quux=Array("2", "3"), baz = new ann(bar = "4"))
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and compile
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(tree)
+} \ No newline at end of file
diff --git a/test/pending/run/reify_closure2b.scala b/test/pending/run/reify_closure2b.scala
index e9fb40bede..b9c0063290 100644
--- a/test/pending/run/reify_closure2b.scala
+++ b/test/pending/run/reify_closure2b.scala
@@ -5,15 +5,14 @@ import reflect.runtime.Mirror.ToolBox
object Test extends App {
def foo(y: Int): Int => Int = {
class Foo(y: Int) {
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
x + y
- }
+ }}
}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(new Foo(y).fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(new Foo(y).fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/pending/run/reify_closure3b.scala b/test/pending/run/reify_closure3b.scala
index 5c4f3c81b9..8f161dbff3 100644
--- a/test/pending/run/reify_closure3b.scala
+++ b/test/pending/run/reify_closure3b.scala
@@ -7,15 +7,14 @@ object Test extends App {
class Foo(y: Int) {
def y1 = y
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
x + y1
- }
+ }}
}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(new Foo(y).fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(new Foo(y).fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/pending/run/reify_closure4b.scala b/test/pending/run/reify_closure4b.scala
index 24dfa9fe17..238795d4dd 100644
--- a/test/pending/run/reify_closure4b.scala
+++ b/test/pending/run/reify_closure4b.scala
@@ -7,15 +7,14 @@ object Test extends App {
class Foo(y: Int) {
val y1 = y
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
x + y1
- }
+ }}
}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(new Foo(y).fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(new Foo(y).fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/pending/run/reify_closure5b.scala b/test/pending/run/reify_closure5b.scala
index 02eb771f0c..bdb2583e8a 100644
--- a/test/pending/run/reify_closure5b.scala
+++ b/test/pending/run/reify_closure5b.scala
@@ -5,15 +5,14 @@ import reflect.runtime.Mirror.ToolBox
object Test extends App {
def foo[T](ys: List[T]): Int => Int = {
class Foo[T](ys: List[T]) {
- val fun: reflect.Code[Int => Int] = x => {
+ val fun = reflect.Code.lift{(x: Int) => {
x + ys.length
- }
+ }}
}
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(new Foo(ys).fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(new Foo(ys).fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/pending/run/reify_closure8b.scala b/test/pending/run/reify_closure8b.scala
index 9e37e4e09a..38031c217b 100644
--- a/test/pending/run/reify_closure8b.scala
+++ b/test/pending/run/reify_closure8b.scala
@@ -10,8 +10,7 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(new Foo(10).fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(new Foo(10).fun.tree)
val foo = dyn.asInstanceOf[Int]
println(foo)
}
diff --git a/test/pending/run/reify_closure9a.scala b/test/pending/run/reify_closure9a.scala
index f3ee153d3c..185f4ffca1 100644
--- a/test/pending/run/reify_closure9a.scala
+++ b/test/pending/run/reify_closure9a.scala
@@ -11,8 +11,7 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(new Foo(y).fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(new Foo(y).fun.tree)
dyn.asInstanceOf[Int]
}
diff --git a/test/pending/run/reify_closure9b.scala b/test/pending/run/reify_closure9b.scala
index 8d349e8701..ad279fac6d 100644
--- a/test/pending/run/reify_closure9b.scala
+++ b/test/pending/run/reify_closure9b.scala
@@ -11,8 +11,7 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(new Foo(y).fun.tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(new Foo(y).fun.tree)
dyn.asInstanceOf[Int]
}
diff --git a/test/pending/run/reify_closures11.scala b/test/pending/run/reify_closures11.scala
index 42053bd029..2c4177b8f2 100644
--- a/test/pending/run/reify_closures11.scala
+++ b/test/pending/run/reify_closures11.scala
@@ -11,8 +11,7 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(fun().tree)
- val dyn = toolbox.runExpr(ttree)
+ val dyn = toolbox.runExpr(fun().tree)
val foo = dyn.asInstanceOf[Int]
println(foo)
}
diff --git a/test/pending/run/reify_csv.scala b/test/pending/run/reify_csv.scala
index a05a3b55d4..a6a616fab0 100644
--- a/test/pending/run/reify_csv.scala
+++ b/test/pending/run/reify_csv.scala
@@ -37,6 +37,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/reify_gadts.scala b/test/pending/run/reify_gadts.scala
index 7077de735c..9feb7a5726 100644
--- a/test/pending/run/reify_gadts.scala
+++ b/test/pending/run/reify_gadts.scala
@@ -40,6 +40,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/reify_lazyevaluation.scala b/test/pending/run/reify_lazyevaluation.scala
index f38af76751..0720a7c979 100644
--- a/test/pending/run/reify_lazyevaluation.scala
+++ b/test/pending/run/reify_lazyevaluation.scala
@@ -60,6 +60,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/reify_properties.scala b/test/pending/run/reify_properties.scala
index 2115a96715..265c344b8e 100644
--- a/test/pending/run/reify_properties.scala
+++ b/test/pending/run/reify_properties.scala
@@ -58,6 +58,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/reify_simpleinterpreter.scala b/test/pending/run/reify_simpleinterpreter.scala
index b39f5583bb..4762afb3cc 100644
--- a/test/pending/run/reify_simpleinterpreter.scala
+++ b/test/pending/run/reify_simpleinterpreter.scala
@@ -77,6 +77,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/t3702.scala b/test/pending/run/t3702.scala
new file mode 100644
index 0000000000..e08fc12e76
--- /dev/null
+++ b/test/pending/run/t3702.scala
@@ -0,0 +1,10 @@
+object Test {
+ def main(args: Array[String]) {
+ foo(Nil, Nil)
+ }
+
+ def foo(h: Any, t: List[Any]) = h match {
+ case 5 :: _ => ()
+ case List(from) => List(from, from, from)
+ }
+}
diff --git a/test/pending/run/t3705.scala b/test/pending/run/t3705.scala
new file mode 100644
index 0000000000..fcc020f28c
--- /dev/null
+++ b/test/pending/run/t3705.scala
@@ -0,0 +1,17 @@
+// package foo
+
+import scala.xml._
+object Test {
+ def updateNodes(ns: Seq[Node]): Seq[Node] =
+ for(subnode <- ns) yield subnode match {
+ case <d>{_}</d> if true => <d>abc</d>
+ case Elem(prefix, label, attribs, scope, children @ _*) =>
+ Elem(prefix, label, attribs, scope, updateNodes(children) : _*)
+ case other => other
+ }
+ def main(args: Array[String]): Unit = {
+ updateNodes(<b />)
+
+ }
+}
+
diff --git a/test/pending/run/t3832.scala b/test/pending/run/t3832.scala
new file mode 100644
index 0000000000..f081d5b3af
--- /dev/null
+++ b/test/pending/run/t3832.scala
@@ -0,0 +1,7 @@
+class Test {
+ def this(un: Int) = {
+ this()
+ def test(xs: List[Int]) = xs map (x => x)
+ ()
+ }
+} \ No newline at end of file
diff --git a/test/pending/run/t4098.scala b/test/pending/run/t4098.scala
new file mode 100644
index 0000000000..b74ccf9bff
--- /dev/null
+++ b/test/pending/run/t4098.scala
@@ -0,0 +1,9 @@
+class A(a: Any) {
+ def this() = { this(b) ; def b = new {} }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new A ("")
+ }
+}
diff --git a/test/pending/run/t4415.scala b/test/pending/run/t4415.scala
new file mode 100644
index 0000000000..f96031d650
--- /dev/null
+++ b/test/pending/run/t4415.scala
@@ -0,0 +1,86 @@
+/**
+ * Demonstration of an issue with Extractors. If lines 15/16 are not present, you get this at runtime:
+ *
+ * Exception in thread "main" java.lang.VerifyError: (class: ExtractorIssue$$, method: convert signature: (LTopProperty;)LMyProp;) Accessing value from uninitialized register 5
+ * at ExtractorIssue.main(ExtractorIssue.scala)
+ * at com.intellij.rt.execution.application.AppMain.main(AppMain.java:115)]
+ *
+ * If lines 15/16 are present, the compiler crashes:
+ *
+ * fatal error (server aborted): not enough arguments for method body%3: (val p: MyProp[java.lang.String])MyProp[_33].
+ * Unspecified value parameter p.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ convert(new SubclassProperty)
+ }
+
+ def convert(prop: TopProperty): MyProp[_] = {
+ prop match {
+
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ //case SubclassSecondMatch(p) => p // if these lines are present, the compiler crashes. If commented, unsafe byte
+ //case SecondMatch(p) => p // code is generated, which causes a java.lang.VerifyError at runtime
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ case SubclassMatch(p) => p
+ case StandardMatch(p) => p
+ }
+ }
+}
+
+class TopProperty
+
+class StandardProperty extends TopProperty
+class SubclassProperty extends StandardProperty
+
+class SecondProperty extends TopProperty
+class SubclassSecondProperty extends StandardProperty
+
+trait MyProp[T]
+case class MyPropImpl[T] extends MyProp[T]
+
+object SubclassMatch {
+
+ def unapply(prop: SubclassProperty) : Option[MyProp[String]] = {
+ Some(new MyPropImpl)
+ }
+
+ def apply(prop: MyProp[String]) : SubclassProperty = {
+ new SubclassProperty()
+ }
+}
+
+object StandardMatch {
+
+ def unapply(prop: StandardProperty) : Option[MyProp[String]] = {
+ Some(new MyPropImpl)
+ }
+
+ def apply(prop: MyProp[String]) : StandardProperty = {
+ new StandardProperty()
+ }
+}
+
+object SubclassSecondMatch {
+
+ def unapply(prop: SubclassSecondProperty) : Option[MyProp[BigInt]] = {
+ Some(new MyPropImpl)
+ }
+
+ def apply(prop: MyProp[String]) : SubclassSecondProperty = {
+ new SubclassSecondProperty()
+ }
+}
+
+object SecondMatch {
+
+ def unapply(prop: SecondProperty) : Option[MyProp[BigInt]] = {
+ Some(new MyPropImpl)
+ }
+
+ def apply(prop: MyProp[String]) : SecondProperty = {
+ new SecondProperty()
+ }
+} \ No newline at end of file
diff --git a/test/pending/run/t4460.scala b/test/pending/run/t4460.scala
new file mode 100644
index 0000000000..324e2f5bef
--- /dev/null
+++ b/test/pending/run/t4460.scala
@@ -0,0 +1,12 @@
+trait A
+
+class B(val x: Int) {
+ self: A =>
+
+ def this() = this()
+}
+
+object Test extends B(2) with A {
+ def main(args: Array[String]) { }
+}
+
diff --git a/test/pending/run/t4511.scala b/test/pending/run/t4511.scala
new file mode 100644
index 0000000000..58d4e0c7b0
--- /dev/null
+++ b/test/pending/run/t4511.scala
@@ -0,0 +1,10 @@
+class Interval[@specialized T](val high: T)
+class Node[@specialized T](val interval: Interval[T]) {
+ val x1 = Some(interval.high)
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new Node(new Interval(5)).x1
+ }
+} \ No newline at end of file
diff --git a/test/pending/run/t4511b.scala b/test/pending/run/t4511b.scala
new file mode 100644
index 0000000000..3337fb3203
--- /dev/null
+++ b/test/pending/run/t4511b.scala
@@ -0,0 +1,25 @@
+import scala.{specialized => spec}
+
+class Interval[@spec(Int) T](high:T)
+
+class X1[@spec(Int) T](interval:Interval[T]) { val x = interval }
+class Y1[@spec(Int) T](interval:Interval[T]) { val y = Some(interval) }
+
+class X2[T](val interval:Interval[T]) { val x = interval }
+class Y2[T](val interval:Interval[T]) { val y = Some(interval) }
+
+class X3[@spec(Int) T](val interval:Interval[T]) { val x = interval }
+class Y3[@spec(Int) T](val interval:Interval[T]) { val y = Some(interval) }
+
+object Test {
+ def tryit(o: => Any) = println(try { "ok: " + o.getClass.getName } catch { case e => "FAIL: " + e + "\n" + e.getStackTrace.mkString("\n ") })
+
+ def main(args: Array[String]) {
+ tryit(new X1(new Interval(3)))
+ tryit(new X2(new Interval(3)))
+ tryit(new X3(new Interval(3)))
+ tryit(new Y1(new Interval(3)))
+ tryit(new Y2(new Interval(3)))
+ tryit(new Y3(new Interval(3)))
+ }
+}
diff --git a/test/pending/run/t4971.scala b/test/pending/run/t4971.scala
new file mode 100644
index 0000000000..c9b6d6f39f
--- /dev/null
+++ b/test/pending/run/t4971.scala
@@ -0,0 +1,16 @@
+trait A[@specialized(Int) K, @specialized(Double) V] {
+ def doStuff(k: K, v: V): Unit = sys.error("I am overridden, you cannot call me")
+}
+
+trait B[@specialized(Double) V] extends A[Int, V] {
+ override def doStuff(k: Int, v: V): Unit = println("Hi - I'm calling doStuff in B")
+}
+
+object Test {
+ def main(args: Array[String]): Unit = delegate(new B[Double]() {}, 1, 0.1)
+
+ def delegate[@specialized(Int) K, @specialized(Double) V](a: A[K, V], k: K, v: V) {
+ a.doStuff(k, v)
+ }
+}
+
diff --git a/test/pending/run/t4996.scala b/test/pending/run/t4996.scala
new file mode 100644
index 0000000000..58a8fe16a3
--- /dev/null
+++ b/test/pending/run/t4996.scala
@@ -0,0 +1,15 @@
+object SpecializationAbstractOverride {
+
+ trait A[@specialized(Int) T] { def foo(t: T) }
+ trait B extends A[Int] { def foo(t: Int) { println("B.foo") } }
+ trait M extends B { abstract override def foo(t: Int) { super.foo(t) ; println ("M.foo") } }
+ object C extends B with M
+
+ object D extends B { override def foo(t: Int) { super.foo(t); println("M.foo") } }
+
+ def main(args: Array[String]) {
+ D.foo(42) // OK, prints B.foo M.foo
+ C.foo(42) // StackOverflowError
+ }
+}
+
diff --git a/test/pending/run/t5224.scala b/test/pending/run/t5224.scala
deleted file mode 100644
index 865ce4bfe9..0000000000
--- a/test/pending/run/t5224.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import scala.reflect._
-import scala.reflect.api._
-
-object Test extends App {
- println(scala.reflect.Code.lift{
- @serializable class C
- }.tree.toString)
-} \ No newline at end of file
diff --git a/test/pending/run/t5225_1.check b/test/pending/run/t5225_1.check
deleted file mode 100644
index b29cd9c365..0000000000
--- a/test/pending/run/t5225_1.check
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- @transient @volatile var x: Int = 2;
- ()
-} \ No newline at end of file
diff --git a/test/pending/run/t5225_1.scala b/test/pending/run/t5225_1.scala
deleted file mode 100644
index 454502e810..0000000000
--- a/test/pending/run/t5225_1.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import scala.reflect._
-import scala.reflect.api._
-
-object Test extends App {
- println(scala.reflect.Code.lift{
- @transient @volatile var x = 2
- }.tree.toString)
-} \ No newline at end of file
diff --git a/test/pending/run/t5225_2.check b/test/pending/run/t5225_2.check
deleted file mode 100644
index 88972fd27f..0000000000
--- a/test/pending/run/t5225_2.check
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- def foo(@cloneable x: Int): String = "";
- ()
-}
diff --git a/test/pending/run/t5225_2.scala b/test/pending/run/t5225_2.scala
deleted file mode 100644
index 82bad0f353..0000000000
--- a/test/pending/run/t5225_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import scala.reflect._
-import scala.reflect.api._
-
-object Test extends App {
- println(scala.reflect.Code.lift{
- def foo(@cloneable x: Int) = ""
- }.tree.toString)
-} \ No newline at end of file
diff --git a/test/pending/run/t5229_1_nolift.scala b/test/pending/run/t5229_1_nolift.scala
deleted file mode 100644
index 33855d2e4f..0000000000
--- a/test/pending/run/t5229_1_nolift.scala
+++ /dev/null
@@ -1 +0,0 @@
-object C
diff --git a/test/pending/run/t5229_2.check b/test/pending/run/t5229_2.check
deleted file mode 100644
index 5db6ec9b38..0000000000
--- a/test/pending/run/t5229_2.check
+++ /dev/null
@@ -1,2 +0,0 @@
-2
-evaluated = null
diff --git a/test/pending/run/t5258b.scala b/test/pending/run/t5258b.scala
index 70cb4a7f4e..3a603095b3 100644
--- a/test/pending/run/t5258b.scala
+++ b/test/pending/run/t5258b.scala
@@ -10,6 +10,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
} \ No newline at end of file
diff --git a/test/pending/run/t5258c.scala b/test/pending/run/t5258c.scala
index a93170d0d6..b0d16ba0b1 100644
--- a/test/pending/run/t5258c.scala
+++ b/test/pending/run/t5258c.scala
@@ -10,6 +10,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
} \ No newline at end of file
diff --git a/test/pending/run/t5271_1.scala b/test/pending/run/t5271_1.scala
index 5f10e64528..afbd8fe465 100644
--- a/test/pending/run/t5271_1.scala
+++ b/test/pending/run/t5271_1.scala
@@ -9,6 +9,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/t5271_2.scala b/test/pending/run/t5271_2.scala
index 71967c04ed..d85d945973 100644
--- a/test/pending/run/t5271_2.scala
+++ b/test/pending/run/t5271_2.scala
@@ -11,6 +11,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
}
diff --git a/test/pending/run/t5271_3.check b/test/pending/run/t5271_3.check
new file mode 100644
index 0000000000..f32a5804e2
--- /dev/null
+++ b/test/pending/run/t5271_3.check
@@ -0,0 +1 @@
+true \ No newline at end of file
diff --git a/test/pending/run/t5271_3.scala b/test/pending/run/t5271_3.scala
new file mode 100644
index 0000000000..5a624de903
--- /dev/null
+++ b/test/pending/run/t5271_3.scala
@@ -0,0 +1,16 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ object C { def qwe = 4 }
+ case class C(foo: Int, bar: Int)
+ val c = C(2, 2)
+ println(c.foo * c.bar == C.qwe)
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ toolbox.runExpr(code.tree)
+}
diff --git a/test/pending/run/t5272.check b/test/pending/run/t5272.check
deleted file mode 100644
index dcf02b2fb6..0000000000
--- a/test/pending/run/t5272.check
+++ /dev/null
@@ -1 +0,0 @@
-okay
diff --git a/test/pending/run/t5276.check b/test/pending/run/t5276.check
deleted file mode 100644
index 0cfbf08886..0000000000
--- a/test/pending/run/t5276.check
+++ /dev/null
@@ -1 +0,0 @@
-2
diff --git a/test/pending/run/t5284.scala b/test/pending/run/t5284.scala
new file mode 100644
index 0000000000..b43afed5b8
--- /dev/null
+++ b/test/pending/run/t5284.scala
@@ -0,0 +1,14 @@
+object Test {
+ def main(args:Array[String]) {
+ val a = Blarg(Array(1,2,3))
+ println(a.m((x:Int) => x+1))
+ }
+}
+
+object Blarg {
+ def apply[T:Manifest](a:Array[T]) = new Blarg(a)
+}
+class Blarg [@specialized T:Manifest](val a:Array[T]) {
+ def m[@specialized W>:T,@specialized S](f:W=>S) = f(a(0))
+}
+
diff --git a/test/pending/run/t5418.scala b/test/pending/run/t5418.scala
index 065710f15e..fe813cf5ae 100644
--- a/test/pending/run/t5418.scala
+++ b/test/pending/run/t5418.scala
@@ -9,6 +9,5 @@ object Test extends App {
val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- toolbox.runExpr(ttree)
+ toolbox.runExpr(code.tree)
} \ No newline at end of file
diff --git a/test/scaladoc/resources/implicit-inheritance-override.scala b/test/scaladoc/resources/implicit-inheritance-override.scala
new file mode 100644
index 0000000000..85b8e8d543
--- /dev/null
+++ b/test/scaladoc/resources/implicit-inheritance-override.scala
@@ -0,0 +1,41 @@
+// This tests the implicit comment inheritance capabilities of scaladoc for class inheritance (no $super, no @inheritdoc)
+class Base {
+ /**
+ * The base comment. And another sentence...
+ *
+ * @param arg1 The T term comment
+ * @param arg2 The string comment
+ * @tparam T the type of the first argument
+ * @return The return comment
+ */
+ def function[T](arg1: T, arg2: String): Double = 0.0d
+}
+
+class DerivedA extends Base {
+ /**
+ * Overriding the comment, the params and returns comments should stay the same.
+ */
+ override def function[T](arg1: T, arg2: String): Double = 1.0d
+}
+
+class DerivedB extends Base {
+ /**
+ * @param arg1 The overridden T term comment
+ * @param arg2 The overridden string comment
+ */
+ override def function[T](arg1: T, arg2: String): Double = 2.0d
+}
+
+class DerivedC extends Base {
+ /**
+ * @return The overridden return comment
+ */
+ override def function[T](arg1: T, arg2: String): Double = 3.0d
+}
+
+class DerivedD extends Base {
+ /**
+ * @tparam T The overridden type parameter comment
+ */
+ override def function[T](arg1: T, arg2: String): Double = 3.0d
+} \ No newline at end of file
diff --git a/test/scaladoc/resources/implicit-inheritance-usecase.scala b/test/scaladoc/resources/implicit-inheritance-usecase.scala
new file mode 100644
index 0000000000..8dd1262e4b
--- /dev/null
+++ b/test/scaladoc/resources/implicit-inheritance-usecase.scala
@@ -0,0 +1,57 @@
+// This tests the implicit comment inheritance capabilities of scaladoc for usecases (no $super, no @inheritdoc)
+/** Testing use case inheritance */
+class UseCaseInheritance {
+ /**
+ * The base comment. And another sentence...
+ *
+ * @param arg1 The T term comment
+ * @param arg2 The string comment
+ * @tparam T The type parameter
+ * @return The return comment
+ *
+ * @usecase def missing_arg[T](arg1: T): Double
+ *
+ * @usecase def missing_targ(arg1: Int, arg2: String): Double
+ *
+ * @usecase def overridden_arg1[T](implicit arg1: T, arg2: String): Double
+ * @param arg1 The overridden T term comment
+ *
+ * @usecase def overridden_targ[T](implicit arg1: T, arg2: String): Double
+ * @tparam T The overridden type parameter comment
+ *
+ * @usecase def overridden_return[T](implicit arg1: T, arg2: String): Double
+ * @return The overridden return comment
+ *
+ * @usecase def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double
+ * @param arg3 The added float comment
+ *
+ * @usecase def overridden_comment[T](implicit arg1: T, arg2: String): Double
+ * The overridden comment.
+ */
+ def function[T](implicit arg1: T, arg2: String): Double = 0.0d
+}
+
+/** Testing the override-use case interaction */
+class UseCaseOverrideInheritance extends UseCaseInheritance {
+ /**
+ * @usecase def missing_arg[T](arg1: T): Double
+ *
+ * @usecase def missing_targ(arg1: Int, arg2: String): Double
+ *
+ * @usecase def overridden_arg1[T](implicit arg1: T, arg2: String): Double
+ * @param arg1 The overridden T term comment
+ *
+ * @usecase def overridden_targ[T](implicit arg1: T, arg2: String): Double
+ * @tparam T The overridden type parameter comment
+ *
+ * @usecase def overridden_return[T](implicit arg1: T, arg2: String): Double
+ * @return The overridden return comment
+ *
+ * @usecase def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double
+ * @param arg3 The added float comment
+ *
+ * @usecase def overridden_comment[T](implicit arg1: T, arg2: String): Double
+ * The overridden comment.
+ */
+ override def function[T](implicit arg1: T, arg2: String): Double = 0.0d
+}
diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.scala b/test/scaladoc/scala/html/HtmlFactoryTest.scala
index d3b3b64359..d46a9581b9 100644
--- a/test/scaladoc/scala/html/HtmlFactoryTest.scala
+++ b/test/scaladoc/scala/html/HtmlFactoryTest.scala
@@ -84,12 +84,7 @@ object Test extends Properties("HtmlFactory") {
val html = scala.stripSuffix(".scala") + ".html"
createTemplates(scala)(html)
}
-
- /**
- * See checkTextOnly(scalaFile: String, checks: List[String])
- */
- def checkText1(scalaFile: String, check: String, debug: Boolean = true): Boolean = checkText(scalaFile, List(check), debug)
-
+
/**
* This tests the text without the markup - ex:
*
@@ -111,20 +106,31 @@ object Test extends Properties("HtmlFactory") {
*
* NOTE: Comparison is done ignoring all whitespace
*/
- def checkText(scalaFile: String, checks: List[String], debug: Boolean = true): Boolean = {
+ def checkText(scalaFile: String, debug: Boolean = true)(checks: (Option[String], String, Boolean)*): Boolean = {
val htmlFile = scalaFile.stripSuffix(".scala") + ".html"
- val htmlText = createTemplates(scalaFile)(htmlFile).text.replace('→',' ').replaceAll("\\s+","")
+ val htmlAllFiles = createTemplates(scalaFile)
var result = true
- for (check <- checks) {
- val checkText = check.replace('→',' ').replaceAll("\\s+","")
- val checkValue = htmlText.contains(checkText)
- if (debug && (!checkValue)) {
- Console.err.println("Check failed: ")
- Console.err.println("HTML: " + htmlText)
- Console.err.println("Check: " + checkText)
- }
- result &&= checkValue
+ for ((fileHint, check, expected) <- checks) {
+ // resolve the file to be checked
+ val fileName = fileHint match {
+ case Some(file) =>
+ if (file endsWith ".html")
+ file
+ else
+ file + ".html"
+ case None =>
+ htmlFile
+ }
+ val fileText = htmlAllFiles(fileName).text.replace('→',' ').replaceAll("\\s+","")
+ val checkText = check.replace('→',' ').replaceAll("\\s+","")
+ val checkValue = fileText.contains(checkText) == expected
+ if (debug && (!checkValue)) {
+ Console.err.println("Check failed: ")
+ Console.err.println("HTML: " + fileText)
+ Console.err.println("Check: " + checkText)
+ }
+ result &&= checkValue
}
result
@@ -426,40 +432,155 @@ object Test extends Properties("HtmlFactory") {
createTemplate("SI_4898.scala")
true
}
-
+
property("Use cases should override their original members") =
- checkText1("SI_5054_q1.scala", """def test(): Int""") &&
- !checkText1("SI_5054_q1.scala", """def test(implicit lost: Int): Int""")
-
+ checkText("SI_5054_q1.scala")(
+ (None,"""def test(): Int""", true),
+ (None,"""def test(implicit lost: Int): Int""", false)
+ )
property("Use cases should keep their flags - final should not be lost") =
- checkText1("SI_5054_q2.scala", """final def test(): Int""")
+ checkText("SI_5054_q2.scala")((None, """final def test(): Int""", true))
property("Use cases should keep their flags - implicit should not be lost") =
- checkText1("SI_5054_q3.scala", """implicit def test(): Int""")
-
+ checkText("SI_5054_q3.scala")((None, """implicit def test(): Int""", true))
+
property("Use cases should keep their flags - real abstract should not be lost") =
- checkText1("SI_5054_q4.scala", """abstract def test(): Int""")
+ checkText("SI_5054_q4.scala")((None, """abstract def test(): Int""", true))
property("Use cases should keep their flags - traits should not be affected") =
- checkText1("SI_5054_q5.scala", """def test(): Int""")
+ checkText("SI_5054_q5.scala")((None, """def test(): Int""", true))
property("Use cases should keep their flags - traits should not be affected") =
- checkText1("SI_5054_q6.scala", """abstract def test(): Int""")
+ checkText("SI_5054_q6.scala")((None, """abstract def test(): Int""", true))
property("Use case individual signature test") =
- checkText("SI_5054_q7.scala", List(
- """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""",
- """abstract def test1(): Int [use case] This takes the implicit value in scope."""))
+ checkText("SI_5054_q7.scala")(
+ (None, """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""", true),
+ (None, """abstract def test1(): Int [use case] This takes the implicit value in scope.""", true)
+ )
property("Display correct \"Definition classes\"") =
- checkText1("SI_5287.scala",
- """def method(): Int
+ checkText("SI_5287.scala")(
+ (None,
+ """def method(): Int
[use case] The usecase explanation
[use case] The usecase explanation
- Definition Classes SI_5287 SI_5287_B SI_5287_A""", debug=true)
- // explanation appears twice, as small comment and full comment
+ Definition Classes SI_5287 SI_5287_B SI_5287_A""", true)
+ ) // the explanation appears twice, as small comment and full comment
+
+ property("Correct comment inheritance for overriding") =
+ checkText("implicit-inheritance-override.scala")(
+ (Some("Base"),
+ """def function[T](arg1: T, arg2: String): Double
+ The base comment.
+ The base comment. And another sentence...
+ T the type of the first argument
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some("DerivedA"),
+ """def function[T](arg1: T, arg2: String): Double
+ Overriding the comment, the params and returns comments should stay the same.
+ Overriding the comment, the params and returns comments should stay the same.
+ T the type of the first argument
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some("DerivedB"),
+ """def function[T](arg1: T, arg2: String): Double
+ T the type of the first argument
+ arg1 The overridden T term comment
+ arg2 The overridden string comment
+ returns The return comment
+ """, true),
+ (Some("DerivedC"),
+ """def function[T](arg1: T, arg2: String): Double
+ T the type of the first argument
+ arg1 The T term comment
+ arg2 The string comment
+ returns The overridden return comment
+ """, true),
+ (Some("DerivedD"),
+ """def function[T](arg1: T, arg2: String): Double
+ T The overridden type parameter comment
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true)
+ )
+
+ for (useCaseFile <- List("UseCaseInheritance", "UseCaseOverrideInheritance")) {
+ property("Correct comment inheritance for usecases") =
+ checkText("implicit-inheritance-usecase.scala")(
+ (Some(useCaseFile),
+ """def missing_arg[T](arg1: T): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The T term comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def missing_targ(arg1: Int, arg2: String): Double
+ [use case]
+ [use case]
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_arg1[T](implicit arg1: T, arg2: String): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The overridden T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_targ[T](implicit arg1: T, arg2: String): Double
+ [use case]
+ [use case]
+ T The overridden type parameter comment
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_return[T](implicit arg1: T, arg2: String): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The T term comment
+ arg2 The string comment
+ returns The overridden return comment
+ """, true),
+ (Some(useCaseFile),
+ """def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The T term comment
+ arg2 The string comment
+ arg3 The added float comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_comment[T](implicit arg1: T, arg2: String): Double
+ [use case] The overridden comment.
+ [use case] The overridden comment.
+ T The type parameter
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true)
+ )
+ }
+
{
val files = createTemplates("basic.scala")
//println(files)
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index 3a75593f21..4c5497e803 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -7,6 +7,7 @@ remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstr
libraryJar="$(pwd)/lib/scala-library.jar"
desired_ext=".desired.sha1"
push_jar="$(pwd)/tools/push.jar"
+if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then push_jar="$(cygpath -m "$push_jar")"; fi
# Cache dir has .sbt in it to line up with SBT build.
cache_dir="${HOME}/.sbt/cache/scala"
@@ -50,7 +51,7 @@ curlDownload() {
checkCurl
local jar=$1
local url=$2
- if [[ "$OSTYPE" == *Cygwin* ]]; then
+ if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then
jar=$(cygpath -m $1)
fi
http_code=$(curl --write-out '%{http_code}' --silent --fail --output "$jar" "$url")
diff --git a/tools/get-scala-revision b/tools/get-scala-revision
index 8747fdc3fb..4d97ec58ad 100755
--- a/tools/get-scala-revision
+++ b/tools/get-scala-revision
@@ -37,5 +37,8 @@ suffix="${described##${tag}-}"
counter=$(echo $suffix | cut -d - -f 1)
hash=$(echo $suffix | cut -d - -f 2)
-# v2.10.0-M1-0098-g6f1c486d0b-2012-02-01
+# remove any alphabetic characters before the version number
+tag=$(echo $tag | sed "s/\([a-z_A-Z]*\)\(.*\)/\2/")
+
+# 2.10.0-M1-0098-g6f1c486d0b-2012-02-01
printf "%s-%04d-%s-%s\n" "$tag" "$counter" "$hash" $(date "+%Y-%m-%d")
diff --git a/tools/verify-jar-cache b/tools/verify-jar-cache
new file mode 100755
index 0000000000..1e86264ecb
--- /dev/null
+++ b/tools/verify-jar-cache
@@ -0,0 +1,33 @@
+#!/bin/sh
+#
+# Discovers files whose sha sum does not match the
+# sha embedded in their directory name from ~/.sbt/cache/scala.
+# Pass -f to remove them, otherwise it just prints them.
+
+set -e
+cd ~/.sbt/cache/scala
+unset failed
+
+unset removal
+[[ $1 == "-f" ]] && removal=true
+
+for file in $(find . -type f); do
+ sha=$(echo "${file:2}" | sed 's/\/.*$//')
+ sum=$(shasum "$file" | sed 's/ .*$//')
+ if [[ $sum != $sha ]]; then
+ failed=true
+ if [[ -n $removal ]]; then
+ echo "Removing corrupt file $file, shasum=$sum"
+ rm -rf $sha
+ else
+ echo "Found corrupt file $file, shasum=$sum."
+ fi
+ fi
+done
+
+if [[ -z $failed ]]; then
+ echo "All cached files match their shas."
+elif [[ -z $removal ]]; then
+ echo ""
+ echo "Run again with -f to remove the corrupt files."
+fi